diff --git a/.github/actions/workspace-release/action.yml b/.github/actions/workspace-release/action.yml index f5e16fab5a..44797e2825 100644 --- a/.github/actions/workspace-release/action.yml +++ b/.github/actions/workspace-release/action.yml @@ -6,28 +6,32 @@ inputs: description: "Release mode: dry-run or publish" required: true default: "dry-run" - verify-main-head: - description: "If true, ensure the triggering SHA matches main's HEAD" + verify-branch-head: + description: "If true, ensure the triggering SHA matches the release branch HEAD" required: false default: "false" + release-branch: + description: "Branch to verify HEAD against (used when verify-branch-head is true)" + required: false + default: "main" runs: using: "composite" steps: - # Optional: guard that release happens from latest main - - name: Verify tag matches main HEAD - if: ${{ inputs.verify-main-head == 'true' }} + # Optional: guard that release happens from latest release branch + - name: Verify tag matches release branch HEAD + if: ${{ inputs.verify-branch-head == 'true' }} shell: bash run: | - git fetch origin main --depth=1 - main_sha="$(git rev-parse origin/main)" + git fetch origin ${{ inputs.release-branch }} --depth=1 + branch_sha="$(git rev-parse origin/${{ inputs.release-branch }})" tag_sha="$(git rev-parse HEAD)" - echo "main_sha=$main_sha" + echo "branch_sha=$branch_sha" echo "tag_sha=$tag_sha" - if [ "$main_sha" != "$tag_sha" ]; then - echo "::error::The release/tag commit does not match origin/main HEAD. Aborting." + if [ "$branch_sha" != "$tag_sha" ]; then + echo "::error::The release/tag commit does not match origin/${{ inputs.release-branch }} HEAD. Aborting." 
exit 1 fi diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 075b64f863..884a7be16a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,6 +12,7 @@ on: branches: [main, next] pull_request: types: [opened, reopened, synchronize] + merge_group: permissions: contents: read diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index c7ffceecce..dee254de61 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -12,6 +12,7 @@ on: branches: [main, next] pull_request: types: [opened, reopened, synchronize] + merge_group: permissions: contents: read diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 44623e5428..da076ca883 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,7 +11,8 @@ on: push: branches: [main, next] pull_request: - types: [opened, reopened, synchronize] + types: [opened, reopened, synchronize] + merge_group: permissions: contents: read diff --git a/.github/workflows/workspace-dry-run.yml b/.github/workflows/workspace-dry-run.yml index 874417bd91..8d23123df3 100644 --- a/.github/workflows/workspace-dry-run.yml +++ b/.github/workflows/workspace-dry-run.yml @@ -5,6 +5,7 @@ on: branches: - main - next + - 'release/**' permissions: contents: read @@ -29,5 +30,5 @@ jobs: uses: ./.github/actions/workspace-release with: mode: "dry-run" - verify-main-head: "false" + verify-branch-head: "false" # ref left blank: uses the pushed ref diff --git a/.github/workflows/workspace-publish.yml b/.github/workflows/workspace-publish.yml index 27d2737ff2..f9f6d9e8ee 100644 --- a/.github/workflows/workspace-publish.yml +++ b/.github/workflows/workspace-publish.yml @@ -19,7 +19,7 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 0 - ref: main + ref: ${{ github.event.release.target_commitish }} - name: Authenticate with crates.io uses: rust-lang/crates-io-auth-action@v1 @@ -29,6 +29,7 @@ jobs: uses: ./.github/actions/workspace-release 
with: mode: "publish" - verify-main-head: "true" + verify-branch-head: "true" + release-branch: ${{ github.event.release.target_commitish }} env: CARGO_REGISTRY_TOKEN: ${{ steps.auth.outputs.token }} diff --git a/.gitmodules b/.gitmodules index b02c269a3f..ee3e6490f2 100644 --- a/.gitmodules +++ b/.gitmodules @@ -4,3 +4,15 @@ [submodule "crates/miden-agglayer/solidity-compat/lib/agglayer-contracts"] path = crates/miden-agglayer/solidity-compat/lib/agglayer-contracts url = https://github.com/agglayer/agglayer-contracts +[submodule "crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable"] + path = crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable + url = https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable.git + branch = release-v4.9 +[submodule "crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts"] + path = crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts + url = https://github.com/OpenZeppelin/openzeppelin-contracts.git + branch = release-v5.0 +[submodule "crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable5"] + path = crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable5 + url = https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable.git + branch = release-v5.0 diff --git a/CHANGELOG.md b/CHANGELOG.md index ed0e203523..79c438998e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,10 @@ ### Features +- Introduced `AssetAmount` wrapper type for fungible asset amounts with validated construction and a maximum value of `2^63 - 2^31` ([#2658](https://github.com/0xMiden/protocol/pull/2658)). +- Made `NoteMetadataHeader` and `NoteMetadata::to_header()` public, added `NoteMetadata::from_header()` constructor, and exported `NoteMetadataHeader` from the `note` module ([#2561](https://github.com/0xMiden/protocol/pull/2561)). 
+- Introduce NOTE_MAX_SIZE (256 KiB) and enforce it on individual output notes ([#2205](https://github.com/0xMiden/miden-base/pull/2205), [#2651](https://github.com/0xMiden/miden-base/pull/2651)) +- Added AggLayer faucet registry to bridge account with conversion metadata, `CONFIG_AGG_BRIDGE` note for faucet registration, and FPI-based asset conversion in `bridge_out` ([#2426](https://github.com/0xMiden/miden-base/pull/2426)). - Enable `CodeBuilder` to add advice map entries to compiled scripts ([#2275](https://github.com/0xMiden/miden-base/pull/2275)). - Added `BlockNumber::MAX` constant to represent the maximum block number ([#2324](https://github.com/0xMiden/miden-base/pull/2324)). - Added single-word `Array` standard ([#2203](https://github.com/0xMiden/miden-base/pull/2203)). @@ -12,6 +16,9 @@ - Implemented verification of AggLayer deposits (claims) against GER ([#2295](https://github.com/0xMiden/miden-base/pull/2295), [#2288](https://github.com/0xMiden/miden-base/pull/2288)). - Added `SignedBlock` struct ([#2355](https://github.com/0xMiden/miden-base/pull/2235)). - Added `PackageKind` and `ProcedureExport` ([#2358](https://github.com/0xMiden/miden-base/pull/2358)). +- Added `AccountTargetNetworkNote` type and `NetworkNoteExt` trait with `is_network_note()` / `as_account_target_network_note()` helpers ([#2365](https://github.com/0xMiden/miden-base/pull/2365)). +- Changed GER storage to a map ([#2388](https://github.com/0xMiden/miden-base/pull/2388)). +- Implemented `assert_valid_ger` procedure for verifying GER against storage ([#2388](https://github.com/0xMiden/miden-base/pull/2388)). - Added `P2idNoteStorage` and `P2ideNoteStorage` ([#2389](https://github.com/0xMiden/miden-base/pull/2389)). - [BREAKING] Added `get_asset` and `get_initial_asset` kernel procedures and removed `get_balance`, `get_initial_balance` and `has_non_fungible_asset` kernel procedures ([#2369](https://github.com/0xMiden/miden-base/pull/2369)). 
- Added `p2id::new` MASM constructor procedure for creating P2ID notes from MASM code ([#2381](https://github.com/0xMiden/miden-base/pull/2381)). @@ -21,13 +28,42 @@ - Resolve standard note scripts directly in `TransactionExecutorHost` instead of querying the data store ([#2417](https://github.com/0xMiden/miden-base/pull/2417)). - Added `DEFAULT_TAG` constant to `miden::standards::note_tag` MASM module ([#2482](https://github.com/0xMiden/miden-base/pull/2482)). - Added `NoteExecutionHint` variant constants (`NONE`, `ALWAYS`, `AFTER_BLOCK`, `ON_BLOCK_SLOT`) to `miden::standards::note::execution_hint` MASM module ([#2493](https://github.com/0xMiden/miden-base/pull/2493)). +- Added `Ownable2Step` account component with two-step ownership transfer (`transfer_ownership`, `accept_ownership`, `renounce_ownership`) and `owner`, `nominated_owner` procedures ([#2292](https://github.com/0xMiden/miden-base/pull/2292)). +- Added PSM authentication procedures and integrated them into `AuthMultisig` ([#2527](https://github.com/0xMiden/protocol/pull/2527)). +- Added `CodeBuilder::with_warnings_as_errors()` to promote assembler warning diagnostics to errors ([#2558](https://github.com/0xMiden/protocol/pull/2558)). +- Added `MintPolicyConfig` for flexible minting policy enforcement ([#2559](https://github.com/0xMiden/protocol/pull/2559)) +- Added `MockChain::add_pending_batch()` to allow submitting user batches directly ([#2565](https://github.com/0xMiden/protocol/pull/2565)). +- Added `create_fungible_key` for construction of fungible asset keys ([#2575](https://github.com/0xMiden/protocol/pull/2575)). +- Implemented the `on_before_asset_added_to_account` asset callback ([#2571](https://github.com/0xMiden/protocol/pull/2571)). +- Implemented the `on_before_asset_added_to_note` asset callback ([#2595](https://github.com/0xMiden/protocol/pull/2595)). 
+- Added `InputNoteCommitment::from_parts()` for construction of input note commitments from a nullifier and optional note header ([#2588](https://github.com/0xMiden/protocol/pull/2588)). +- Added `SwapNoteStorage` for typed serialization/deserialization of SWAP note storage ([#2585](https://github.com/0xMiden/protocol/pull/2585)). +- Added `bool` schema type to the type registry and updated ACL auth component to use it for boolean config fields ([#2591](https://github.com/0xMiden/protocol/pull/2591)). +- Added `component_metadata()` to all account components to expose their metadata ([#2596](https://github.com/0xMiden/protocol/pull/2596)). +- Added `Package` support in `MockChainBuilder` & `NoteScript` ([#2502](https://github.com/0xMiden/protocol/pull/2502)). +- Added `ProgramExecutor` hooks to support DAP and other custom transaction program executors ([#2574](https://github.com/0xMiden/protocol/pull/2574)). +- [BREAKING] Changed `native_account::remove_asset` to return the asset value remaining in the vault instead of the removed value ([#2626](https://github.com/0xMiden/protocol/pull/2626)). +- Implement `TransactionEventId::event_name` and `Host::resolve_event` for better VM diagnostics during event handler failures ([#2628](https://github.com/0xMiden/protocol/pull/2628)). +- Added `FixedWidthString` for fixed-width UTF-8 string storage in `miden-standards` (`miden::standards::utils::string`). ([#2633](https://github.com/0xMiden/protocol/pull/2633)) +- Added metadata hash storage to AggLayer faucet and FPI retrieval during bridge-out leaf construction ([#2583](https://github.com/0xMiden/protocol/pull/2583)). ### Changes +- Migrated to miden-vm v0.22 and miden-crypto v0.23 ([#2644](https://github.com/0xMiden/protocol/pull/2644)). +- [BREAKING] Renamed `AccountComponent::get_procedures()` to `procedures()`, returning `impl Iterator` ([#2597](https://github.com/0xMiden/protocol/pull/2597)).
+- [BREAKING] Removed `NoteAssets::add_asset`; `OutputNoteBuilder` now accumulates assets in a `Vec` and computes the commitment only when `build()` is called, avoiding rehashing on every asset addition. ([#2577](https://github.com/0xMiden/protocol/pull/2577)) +- [BREAKING] Made `supported_types` a required parameter of `AccountComponentMetadata::new()`; removed `with_supported_type`, `with_supported_types`, `with_supports_all_types`, and `with_supports_regular_types` builder methods; added `AccountType::all()` and `AccountType::regular()` helpers ([#2554](https://github.com/0xMiden/protocol/pull/2554)). +- [BREAKING] Migrated to miden-vm 0.21 and miden-crypto 0.22 ([#2508](https://github.com/0xMiden/miden-base/pull/2508)). +- [BREAKING] The stack orientation changed from big-endian to little-endian - see PR description ([#2508](https://github.com/0xMiden/miden-base/pull/2508)). +- [BREAKING] The native hash function changed from RPO256 to Poseidon2 - see PR description ([#2508](https://github.com/0xMiden/miden-base/pull/2508)). +- Introduced `StorageMapKey` and `StorageMapKeyHash` Word wrappers for type-safe storage map key handling ([#2431](https://github.com/0xMiden/miden-base/pull/2431)). +- Restructured `miden-agglayer/asm` directory to separate bridge and faucet into per-component libraries, preventing cross-component procedure exposure ([#2294](https://github.com/0xMiden/miden-base/issues/2294)). +- Prefixed standard account component names with `miden::standards::components` ([#2400](https://github.com/0xMiden/miden-base/pull/2400)). - Made kernel procedure offset constants public and replaced accessor procedures with direct constant usage ([#2375](https://github.com/0xMiden/miden-base/pull/2375)). 
- [BREAKING] Made `AccountComponentMetadata` a required parameter of `AccountComponent::new()`; removed `with_supported_type`, `with_supports_all_types`, and `with_metadata` methods from `AccountComponent`; simplified `AccountComponentMetadata::new()` to take just `name`; renamed `AccountComponentTemplateError` to `ComponentMetadataError` ([#2373](https://github.com/0xMiden/miden-base/pull/2373), [#2395](https://github.com/0xMiden/miden-base/pull/2395)). - Fixed MASM inline comment casing to adhere to commenting conventions ([#2398](https://github.com/0xMiden/miden-base/pull/2398)). +- [BREAKING] Removed `ProvenTransactionBuilder` in favor of `ProvenTransaction::new()` constructor ([#2567](https://github.com/0xMiden/miden-base/pull/2567)). - Removed redundant note storage item count from advice map ([#2376](https://github.com/0xMiden/miden-base/pull/2376)). - Moved `NoteExecutionHint` to `miden-standards` ([#2378](https://github.com/0xMiden/miden-base/pull/2378)). - Added `miden::protocol::auth` module with public auth event constants ([#2377](https://github.com/0xMiden/miden-base/pull/2377)). @@ -45,13 +81,40 @@ - [BREAKING] Updated note tag length to support up to 32 bits ([#2329](https://github.com/0xMiden/miden-base/pull/2329)). - [BREAKING] Moved standard note code into individual note modules ([#2363](https://github.com/0xMiden/miden-base/pull/2363)). - [BREAKING] Added `miden::standards::note_tag` module for account target note tags ([#2366](https://github.com/0xMiden/miden-base/pull/2366)). +- [BREAKING] Refactored assets in the tx kernel and `miden::protocol` from one to two words, i.e. `ASSET` becomes `ASSET_KEY` and `ASSET_VALUE` ([#2396](https://github.com/0xMiden/miden-base/pull/2396), [#2410](https://github.com/0xMiden/miden-base/pull/2410)). +- [BREAKING] Rename `miden::protocol::asset::build_fungible_asset` to `miden::protocol::asset::create_fungible_asset` ([#2410](https://github.com/0xMiden/miden-base/pull/2410)). 
+- [BREAKING] Rename `miden::protocol::asset::build_non_fungible_asset` to `miden::protocol::asset::create_non_fungible_asset` ([#2410](https://github.com/0xMiden/miden-base/pull/2410)). +- [BREAKING] Change the layout of fungible and non-fungible assets ([#2437](https://github.com/0xMiden/miden-base/pull/2437)). +- [BREAKING] Refactored assets in the tx kernel from one to two words, i.e. `ASSET` becomes `ASSET_KEY` and `ASSET_VALUE` ([#2396](https://github.com/0xMiden/miden-base/pull/2396)). +- Unified the underlying representation of `ExitRoot` and `SmtNode` and use type aliases ([#2387](https://github.com/0xMiden/miden-base/pull/2387)). +- [BREAKING] Moved padding to the end of `CLAIM` `NoteStorage` layout ([#2405](https://github.com/0xMiden/miden-base/pull/2405)). - [BREAKING] Consolidated authentication components ([#2390] (https://github.com/0xMiden/miden-base/pull/2390)) - [BREAKING] Refactored account ID and nonce memory and advice stack layout ([#2442](https://github.com/0xMiden/miden-base/pull/2442)). - [BREAKING] Removed `hash_account` ([#2442](https://github.com/0xMiden/miden-base/pull/2442)). - [BREAKING] Renamed `AccountHeader::commitment`, `Account::commitment` and `PartialAccount::commitment` to `to_commitment` ([#2442](https://github.com/0xMiden/miden-base/pull/2442)). - [BREAKING] Remove `BlockSigner` trait ([#2447](https://github.com/0xMiden/miden-base/pull/2447)). - Updated account schema commitment construction to accept borrowed schema iterators; added extension trait to enable `AccountBuilder::with_schema_commitment()` helper ([#2419](https://github.com/0xMiden/miden-base/pull/2419)). +- Introducing a dedicated AccountIdKey type to unify and centralize all AccountId → SMT and advice-map key conversions ([#2495](https://github.com/0xMiden/miden-base/pull/2495)). +- [BREAKING] Renamed `SchemaTypeId` to `SchemaType` ([#2494](https://github.com/0xMiden/miden-base/pull/2494)). 
- Updated stale `miden-base` references to `protocol` across docs, READMEs, code comments, and Cargo.toml repository URL ([#2503](https://github.com/0xMiden/protocol/pull/2503)). +- [BREAKING] Reverse the order of the transaction summary on the stack ([#2512](https://github.com/0xMiden/miden-base/pull/2512)). +- [BREAKING] Use `@auth_script` MASM attribute instead of `auth_` prefix to identify authentication procedures in account components ([#2534](https://github.com/0xMiden/protocol/pull/2534)). +- [BREAKING] Changed `TransactionId` to include fee asset in hash computation, making it commit to entire `TransactionHeader` contents. +- Explicitly use `get_native_account_active_storage_slots_ptr` in `account::set_item` and `account::set_map_item`. +- Added Ownable2Step as an Account Component ([#2572](https://github.com/0xMiden/protocol/pull/2572)) +- [BREAKING] Introduced `PrivateNoteHeader` for output notes and removed `RawOutputNote::Header` variant ([#2569](https://github.com/0xMiden/protocol/pull/2569)). +- [BREAKING] Changed `asset::create_fungible_asset` and `faucet::create_fungible_asset` signature to take `enable_callbacks` flag ([#2571](https://github.com/0xMiden/protocol/pull/2571)). +- [BREAKING] Fixed `TokenSymbol::try_from(Felt)` to reject values below `MIN_ENCODED_VALUE`; implemented `Display` for `TokenSymbol` replacing the fallible `to_string()` method; removed `Default` derive ([#2464](https://github.com/0xMiden/protocol/issues/2464)). +- Moved `AccountSchemaCommitment` component into a sub-module ([#2603](https://github.com/0xMiden/protocol/pull/2603)). +- [BREAKING] `AssetVault::remove_asset` returns the asset value remaining in the vault `Option` rather than the removed value `Asset` ([#2626](https://github.com/0xMiden/protocol/pull/2626)). +- [BREAKING] `miden::protocol::faucet::burn` no longer returns the burnt asset value ([#2626](https://github.com/0xMiden/protocol/pull/2626)). 
+- Fixed overlap in initial and active account storage slot memory region ([#2557](https://github.com/0xMiden/protocol/pull/2557)). +- Fixed link map entry pointer validation bypass ([#2556](https://github.com/0xMiden/protocol/pull/2556)). +- Added foreign account ID assertion in `account::load_foreign_account` ([#2560](https://github.com/0xMiden/protocol/pull/2560)). + +### Fixes + +- Fixed `PartialAccountTree::track_account` rejecting provably-empty leaves in sparse trees by handling `SmtLeaf::Empty` correctly ([#2598](https://github.com/0xMiden/protocol/pull/2598)). ## 0.13.3 (2026-01-27) diff --git a/Cargo.lock b/Cargo.lock index ed558bfe4e..7164559df0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -59,10 +59,79 @@ dependencies = [ ] [[package]] -name = "allocator-api2" -version = "0.2.21" +name = "alloy-primitives" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3b431b4e72cd8bd0ec7a50b4be18e73dab74de0dba180eef171055e5d5926e" +dependencies = [ + "bytes", + "cfg-if", + "const-hex", + "derive_more", + "itoa", + "paste", + "ruint", + "rustc-hash", + "sha3", +] + +[[package]] +name = "alloy-sol-macro" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab81bab693da9bb79f7a95b64b394718259fdd7e41dceeced4cad57cb71c4f6a" +dependencies = [ + "alloy-sol-macro-expander", + "alloy-sol-macro-input", + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "alloy-sol-macro-expander" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "489f1620bb7e2483fb5819ed01ab6edc1d2f93939dce35a5695085a1afd1d699" +dependencies = [ + "alloy-sol-macro-input", + "const-hex", + "heck", + "indexmap", + "proc-macro-error2", + "proc-macro2", + "quote", + "sha3", + "syn 2.0.117", + "syn-solidity", +] + +[[package]] +name = "alloy-sol-macro-input" +version = "1.5.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "56cef806ad22d4392c5fc83cf8f2089f988eb99c7067b4e0c6f1971fc1cca318" +dependencies = [ + "const-hex", + "dunce", + "heck", + "macro-string", + "proc-macro2", + "quote", + "syn 2.0.117", + "syn-solidity", +] + +[[package]] +name = "alloy-sol-types" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" +checksum = "64612d29379782a5dde6f4b6570d9c756d734d760c0c94c254d361e678a6591f" +dependencies = [ + "alloy-primitives", + "alloy-sol-macro", +] [[package]] name = "anes" @@ -87,9 +156,9 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" +checksum = "940b3a0ca603d1eade50a4846a2afffd5ef57a9feac2c0e2ec2e14f9ead76000" [[package]] name = "anstyle-parse" @@ -106,7 +175,7 @@ version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ - "windows-sys 0.61.2", + "windows-sys", ] [[package]] @@ -117,17 +186,14 @@ checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", "once_cell_polyfill", - "windows-sys 0.61.2", + "windows-sys", ] [[package]] name = "anyhow" -version = "1.0.100" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" -dependencies = [ - "backtrace", -] +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" [[package]] name = "arrayref" @@ -177,15 +243,6 @@ dependencies = [ "rustc-demangle", ] -[[package]] -name = "backtrace-ext" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50" -dependencies = [ - "backtrace", -] - [[package]] name = "base16ct" version = "0.2.0" @@ -234,6 +291,15 @@ dependencies = [ "tokio", ] +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + [[package]] name = "bit-set" version = "0.8.0" @@ -257,9 +323,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" [[package]] name = "blake3" @@ -286,15 +352,15 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.19.1" +version = "3.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" [[package]] name = "bytemuck" -version = "1.24.0" +version = "1.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4" +checksum = "c8efb64bd706a16a1bdde310ae86b351e4d21550d98d056f22f8a7f7a2183fec" [[package]] name = "byteorder" @@ -302,6 +368,12 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + [[package]] name = "cast" version = "0.3.0" @@ -310,9 
+382,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.55" +version = "1.2.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47b26a0954ae34af09b50f0de26458fa95369a0d478d8236d3f93082b219bd29" +checksum = "7a0dd1ca384932ff3641c8718a02769f1698e7563dc6974ffd03346116310423" dependencies = [ "find-msvc-tools", "jobserver", @@ -390,18 +462,18 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.56" +version = "4.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75ca66430e33a14957acc24c5077b503e7d374151b2b4b3a10c83b4ceb4be0e" +checksum = "b193af5b67834b676abd72466a96c1024e6a6ad978a1f484bd90b85c94041351" dependencies = [ "clap_builder", ] [[package]] name = "clap_builder" -version = "4.5.56" +version = "4.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793207c7fa6300a0608d1080b858e5fdbe713cdc1c8db9fb17777d8a13e63df0" +checksum = "714a53001bf66416adb0e2ef5ac857140e7dc3a0c48fb28b2f10762fc4b5069f" dependencies = [ "anstyle", "clap_lex", @@ -409,9 +481,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.7" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32" +checksum = "c8d4a3bb8b1e0c1050499d1815f5ab16d04f0959b233085fb31653fbfc9d98f9" [[package]] name = "color-eyre" @@ -442,9 +514,21 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.4" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d07550c9036bf2ae0c684c4297d503f838287c83c53686d05370d0e139ae570" + +[[package]] +name = "const-hex" +version = "1.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" +checksum = 
"531185e432bb31db1ecda541e9e7ab21468d4d844ad7505e0546a49b4945d49b" +dependencies = [ + "cfg-if", + "cpufeatures", + "proptest", + "serde_core", +] [[package]] name = "const-oid" @@ -458,6 +542,15 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d52eff69cd5e647efe296129160853a42795992097e8af39800e1060caeea9b" +[[package]] +name = "convert_case" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "cpufeatures" version = "0.2.17" @@ -605,7 +698,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -642,10 +735,12 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" dependencies = [ + "convert_case", "proc-macro2", "quote", "rustc_version 0.4.1", - "syn 2.0.114", + "syn 2.0.117", + "unicode-xid", ] [[package]] @@ -662,9 +757,15 @@ dependencies = [ [[package]] name = "dissimilar" -version = "1.0.10" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aeda16ab4059c5fd2a83f2b9c9e9c981327b18aa8e3b313f7e6563799d4f093e" + +[[package]] +name = "dunce" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8975ffdaa0ef3661bfe02dbdcc06c9f829dfafe6a3c474de366a8d5e44276921" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] name = "ecdsa" @@ -732,18 +833,18 @@ dependencies = [ [[package]] name = "ena" -version = "0.14.3" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" +checksum = 
"eabffdaee24bd1bf95c5ef7cec31260444317e72ea56c4c91750e8b7ee58d5f1" dependencies = [ "log", ] [[package]] name = "env_filter" -version = "0.1.4" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bf3c259d255ca70051b30e2e95b5446cdb8949ac4cd22c0d7fd634d89f568e2" +checksum = "7a1c3cc8e57274ec99de65301228b537f1e4eedc1b8e0f9411c6caac8ae7308f" dependencies = [ "log", "regex", @@ -751,9 +852,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.11.8" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f" +checksum = "b2daee4ea451f429a58296525ddf28b45a3b64f1acf6587e2067437bb11e218d" dependencies = [ "anstream", "anstyle", @@ -779,7 +880,7 @@ checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -795,7 +896,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.61.2", + "windows-sys", ] [[package]] @@ -877,24 +978,24 @@ dependencies = [ [[package]] name = "foldhash" -version = "0.2.0" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] name = "fs-err" -version = "3.2.2" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf68cef89750956493a66a10f512b9e58d9db21f2a573c079c0bdf1207a54a7" +checksum = "73fde052dbfc920003cfd2c8e2c6e6d4cc7c1091538c3a24226cec0665ab08c0" dependencies = [ "autocfg", ] [[package]] name = "futures" -version = "0.3.31" +version = "0.3.32" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" dependencies = [ "futures-channel", "futures-core", @@ -906,9 +1007,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", "futures-sink", @@ -916,38 +1017,38 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" [[package]] name = "futures-task" -version 
= "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" [[package]] name = "futures-timer" @@ -957,16 +1058,15 @@ checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures-core", "futures-macro", "futures-sink", "futures-task", "pin-project-lite", - "pin-utils", "slab", ] @@ -1018,11 +1118,24 @@ dependencies = [ "cfg-if", "js-sys", "libc", - "r-efi", + "r-efi 5.3.0", "wasip2", "wasm-bindgen", ] +[[package]] +name = "getrandom" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +dependencies = [ + "cfg-if", + "libc", + "r-efi 6.0.0", + "wasip2", + "wasip3", +] + [[package]] name = "gimli" version = "0.28.1" @@ -1059,18 +1172,25 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.16.1" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ - "allocator-api2", - "equivalent", "foldhash", - "rayon", - "serde", - "serde_core", ] +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + [[package]] name = "hermit-abi" version = "0.5.2" @@ -1101,6 +1221,12 @@ dependencies = [ "digest", ] +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + [[package]] name = "indenter" version = "0.3.4" @@ -1114,7 +1240,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.16.1", + "serde", + "serde_core", ] [[package]] @@ -1152,15 +1280,9 @@ checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" dependencies = [ "hermit-abi", "libc", - "windows-sys 0.61.2", + "windows-sys", ] -[[package]] -name = "is_ci" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" - [[package]] name = "is_terminal_polyfill" version = "1.70.2" @@ -1202,9 +1324,9 @@ checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" [[package]] name = "jiff" -version = "0.2.18" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e67e8da4c49d6d9909fe03361f9b620f58898859f5c7aded68351e85e71ecf50" +checksum = "1a3546dc96b6d42c5f24902af9e2538e82e39ad350b0c766eb3fbf2d8f3d8359" dependencies = [ "jiff-static", "log", @@ -1215,13 +1337,13 @@ dependencies = [ [[package]] name = "jiff-static" -version = "0.2.18" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0c84ee7f197eca9a86c6fd6cb771e55eb991632f15f2bc3ca6ec838929e6e78" +checksum = "2a8c8b344124222efd714b73bb41f8b5120b27a7cc1c75593a6ff768d9d05aa4" dependencies = [ "proc-macro2", 
"quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -1236,9 +1358,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.85" +version = "0.3.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" +checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c" dependencies = [ "once_cell", "wasm-bindgen", @@ -1260,9 +1382,9 @@ dependencies = [ [[package]] name = "keccak" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +checksum = "cb26cec98cce3a3d96cbb7bced3c4b16e3d13f27ec56dbd62cbc8f39cfb9d653" dependencies = [ "cpufeatures", ] @@ -1303,11 +1425,17 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + [[package]] name = "libc" -version = "0.2.180" +version = "0.2.183" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" +checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" [[package]] name = "libm" @@ -1317,15 +1445,9 @@ checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" [[package]] name = "linux-raw-sys" -version = "0.4.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" - -[[package]] -name = "linux-raw-sys" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" [[package]] name = "lock_api" @@ -1352,7 +1474,18 @@ dependencies = [ "generator", "scoped-tls", "tracing", - "tracing-subscriber 0.3.22", + "tracing-subscriber 0.3.23", +] + +[[package]] +name = "macro-string" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b27834086c65ec3f9387b096d66e99f221cf081c2b738042aa252bcd41204e3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", ] [[package]] @@ -1366,23 +1499,24 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.6" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" [[package]] name = "memmap2" -version = "0.9.9" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744133e4a0e0a658e1374cf3bf8e415c4052a15a111acd372764c55b4177d490" +checksum = "714098028fe011992e1c3962653c96b2d578c4b4bce9036e15ff220319b1e0e3" dependencies = [ "libc", ] [[package]] name = "miden-agglayer" -version = "0.14.0" +version = "0.14.0-beta.2" dependencies = [ + "alloy-sol-types", "fs-err", "miden-agglayer", "miden-assembly", @@ -1392,28 +1526,32 @@ dependencies = [ "miden-protocol", "miden-standards", "miden-utils-sync", + "primitive-types", "regex", + "serde", + "serde_json", + "thiserror", "walkdir", ] [[package]] name = "miden-air" -version = "0.20.3" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab2f1db9cdbd5da3eaf07fa0a8122d27b575f96b0699388c98f6c0e468cb9c1f" +checksum = "5322d00bef8b19f4cd3415da2533a87c8860c7d9b80043d6cce0f184b40c5fff" dependencies = [ "miden-core", + "miden-crypto", "miden-utils-indexing", "thiserror", - "winter-air", - 
"winter-prover", + "tracing", ] [[package]] name = "miden-assembly" -version = "0.20.6" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2395b2917aea613a285d3425d1ca07e6c45442e2b34febdea2081db555df62fc" +checksum = "7ece22da0cbf350e4a2939a07eaa3200445e42e47ce1b1ee6538723b6b40a4d4" dependencies = [ "env_logger", "log", @@ -1426,9 +1564,9 @@ dependencies = [ [[package]] name = "miden-assembly-syntax" -version = "0.20.6" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f9bed037d137f209b9e7b28811ec78c0536b3f9259d6f4ceb5823c87513b346" +checksum = "d84a0e14ce66e76497a6771f3e360eb85557f2417ea22db279d54c1238ffafde" dependencies = [ "aho-corasick", "env_logger", @@ -1450,7 +1588,7 @@ dependencies = [ [[package]] name = "miden-block-prover" -version = "0.14.0" +version = "0.14.0-beta.2" dependencies = [ "miden-protocol", "thiserror", @@ -1458,9 +1596,9 @@ dependencies = [ [[package]] name = "miden-core" -version = "0.20.3" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2716bb01f07f0b19398e3d9785e23a724b89aef64d614a9073c1d44c6898a9a9" +checksum = "7bf4f5601b0d669aa125cce3bba4b98f2c8df729e2d53e66777429ac5f53e228" dependencies = [ "derive_more", "itertools 0.14.0", @@ -1469,20 +1607,19 @@ dependencies = [ "miden-formatting", "miden-utils-core-derive", "miden-utils-indexing", + "miden-utils-sync", "num-derive", "num-traits", "proptest", "proptest-derive", "thiserror", - "winter-math", - "winter-utils", ] [[package]] name = "miden-core-lib" -version = "0.20.3" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ac97f4fb334ee842663f99f33677beacc7bdf4b7d4eeff419c2cd98a5a68bfa" +checksum = "82595fabb062315c32f6fc11c31755d3e5c6f8bc8c67d35154a067397d65b1de" dependencies = [ "env_logger", "fs-err", @@ -1491,15 +1628,14 @@ dependencies = [ "miden-crypto", "miden-processor", "miden-utils-sync", 
- "sha2", "thiserror", ] [[package]] name = "miden-crypto" -version = "0.19.4" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e28b6e110f339c2edc2760a8cb94863f0a055ee658a49bc90c8560eff2feef4" +checksum = "0ed0a034a460e27723dcfdf25effffab84331c3b46b13e7a1bd674197cc71bfe" dependencies = [ "blake3", "cc", @@ -1508,42 +1644,51 @@ dependencies = [ "ed25519-dalek", "flume", "glob", - "hashbrown", "hkdf", "k256", "miden-crypto-derive", + "miden-field", + "miden-serde-utils", "num", "num-complex", - "rand", + "p3-blake3", + "p3-challenger", + "p3-dft", + "p3-goldilocks", + "p3-keccak", + "p3-matrix", + "p3-maybe-rayon", + "p3-miden-lifted-stark", + "p3-symmetric", + "p3-util", + "rand 0.9.2", "rand_chacha", "rand_core 0.9.5", "rand_hc", "rayon", + "serde", "sha2", "sha3", "subtle", "thiserror", - "winter-crypto", - "winter-math", - "winter-utils", "x25519-dalek", ] [[package]] name = "miden-crypto-derive" -version = "0.19.4" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40e95b9c7c99ed6bbf073d9e02721d812dedd2c195019c0a0e0a3dbb9cbf034" +checksum = "e8bf6ebde028e79bcc61a3632d2f375a5cc64caa17d014459f75015238cb1e08" dependencies = [ "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] name = "miden-debug-types" -version = "0.20.3" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b421786850ce05627355ee616c4a5fdc4a9ad1591859ede5e5564ab74aa4abd2" +checksum = "c9ef08bafef275f0d6a15108108b3f6df6642772e0a1c05e102cb7e96841e888" dependencies = [ "memchr", "miden-crypto", @@ -1557,6 +1702,24 @@ dependencies = [ "thiserror", ] +[[package]] +name = "miden-field" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38011348f4fb4c9e5ce1f471203d024721c00e3b60a91aa91aaefe6738d8b5ea" +dependencies = [ + "miden-serde-utils", + "num-bigint", + "p3-challenger", + "p3-field", + "p3-goldilocks", + 
"paste", + "rand 0.10.0", + "serde", + "subtle", + "thiserror", +] + [[package]] name = "miden-formatting" version = "0.1.1" @@ -1568,13 +1731,14 @@ dependencies = [ [[package]] name = "miden-mast-package" -version = "0.20.3" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "169025a61c2ca2e8a0f53f20a7bdcbdd1f8e34f528676137208bff64944652bb" +checksum = "f9b24d09fda64e0751f943ac616643342b05a47d626e2ee0040b902eff3c924e" dependencies = [ "derive_more", "miden-assembly-syntax", "miden-core", + "miden-debug-types", "thiserror", ] @@ -1584,25 +1748,19 @@ version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eef536978f24a179d94fa2a41e4f92b28e7d8aab14b8d23df28ad2a3d7098b20" dependencies = [ - "backtrace", - "backtrace-ext", "cfg-if", "futures", "indenter", "lazy_static", "miden-miette-derive", - "owo-colors 4.2.3", + "owo-colors 4.3.0", "regex", "rustc_version 0.2.3", "rustversion", "serde_json", "spin 0.9.8", "strip-ansi-escapes", - "supports-color", - "supports-hyperlinks", - "supports-unicode", - "syn 2.0.114", - "terminal_size", + "syn 2.0.117", "textwrap", "thiserror", "trybuild", @@ -1617,14 +1775,14 @@ checksum = "86a905f3ea65634dd4d1041a4f0fd0a3e77aa4118341d265af1a94339182222f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] name = "miden-processor" -version = "0.20.3" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a18a6a5eebe64e81a29be6321ee8f4478c6bfaf619b7689825884e8cd308c044" +checksum = "ba53ff06ef0affa0c3fb13e7e2ef5bde99f96eebcec8c360c6658050480ef676" dependencies = [ "itertools 0.14.0", "miden-air", @@ -1637,12 +1795,11 @@ dependencies = [ "thiserror", "tokio", "tracing", - "winter-prover", ] [[package]] name = "miden-protocol" -version = "0.14.0" +version = "0.14.0-beta.2" dependencies = [ "anyhow", "assert_matches", @@ -1651,7 +1808,6 @@ dependencies = [ "criterion 0.5.1", 
"fs-err", "getrandom 0.3.4", - "miden-air", "miden-assembly", "miden-assembly-syntax", "miden-core", @@ -1664,7 +1820,7 @@ dependencies = [ "miden-utils-sync", "miden-verifier", "pprof", - "rand", + "rand 0.9.2", "rand_chacha", "rand_xoshiro", "regex", @@ -1675,37 +1831,49 @@ dependencies = [ "thiserror", "toml", "walkdir", - "winter-air", - "winter-rand-utils", ] [[package]] name = "miden-protocol-macros" -version = "0.14.0" +version = "0.14.0-beta.2" dependencies = [ "miden-protocol", "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] name = "miden-prover" -version = "0.20.3" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83070f0ca1a08235362e990238b6487191f814054aaebcc40883a073fdcd18f9" +checksum = "15462425359e87540d92e277cf1174a85a174ca433bd63d27286f65ab318f2d4" dependencies = [ + "bincode", "miden-air", + "miden-core", + "miden-crypto", "miden-debug-types", "miden-processor", + "serde", + "thiserror", + "tokio", "tracing", - "winter-maybe-async", - "winter-prover", +] + +[[package]] +name = "miden-serde-utils" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff78082e9b4ca89863e68da01b35f8a4029ee6fd912e39fa41fde4273a7debab" +dependencies = [ + "p3-field", + "p3-goldilocks", ] [[package]] name = "miden-standards" -version = "0.14.0" +version = "0.14.0-beta.2" dependencies = [ "anyhow", "assert_matches", @@ -1716,7 +1884,7 @@ dependencies = [ "miden-processor", "miden-protocol", "miden-standards", - "rand", + "rand 0.9.2", "regex", "thiserror", "walkdir", @@ -1724,7 +1892,7 @@ dependencies = [ [[package]] name = "miden-testing" -version = "0.14.0" +version = "0.14.0-beta.2" dependencies = [ "anyhow", "assert_matches", @@ -1741,20 +1909,18 @@ dependencies = [ "miden-tx", "miden-tx-batch-prover", "primitive-types", - "rand", + "rand 0.9.2", "rand_chacha", "rstest", "serde", "serde_json", "thiserror", "tokio", - "winter-rand-utils", - 
"winterfell", ] [[package]] name = "miden-tx" -version = "0.14.0" +version = "0.14.0-beta.2" dependencies = [ "anyhow", "assert_matches", @@ -1771,7 +1937,7 @@ dependencies = [ [[package]] name = "miden-tx-batch-prover" -version = "0.14.0" +version = "0.14.0-beta.2" dependencies = [ "miden-protocol", "miden-tx", @@ -1779,9 +1945,9 @@ dependencies = [ [[package]] name = "miden-utils-core-derive" -version = "0.20.3" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9fc6d350fb9ad44797e8d0a1feaacaa6ee4079ef752d9ababc101ffc40ec354" +checksum = "477db426fc31f666d7e65b0cc907fe431d36d88d611a0594cf266104eb168b4c" dependencies = [ "proc-macro2", "quote", @@ -1790,9 +1956,9 @@ dependencies = [ [[package]] name = "miden-utils-diagnostics" -version = "0.20.3" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af2462fb2e750247a56264eddf40e2e1c8d96ff9379abe73acbcbe81e530e1d5" +checksum = "785c1ec4ad9994100b117b8eab8c453dcc35d3d168e4f72ac818efb700abe7b1" dependencies = [ "miden-crypto", "miden-debug-types", @@ -1803,44 +1969,51 @@ dependencies = [ [[package]] name = "miden-utils-indexing" -version = "0.20.3" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57046b5c263b78e7fa5a6e328ca852e6319cf844faa26fbdcbb128ec555deb2a" +checksum = "46cec00c8cf32ec46df7542fb9ea15fbe7a5149920ef97776a4f4bc3a563e8de" dependencies = [ + "miden-crypto", "thiserror", ] [[package]] name = "miden-utils-sync" -version = "0.20.3" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d3e129b62099672a1ffc012ab2e26ee7f2b35e4ca18ca1f726b88c53546ddd" +checksum = "9529c1c173506f30d3949f7a54b65f1eb318098e37ed5730a1bb9027eee2fa4b" dependencies = [ "lock_api", "loom", + "once_cell", "parking_lot", ] [[package]] name = "miden-verifier" -version = "0.20.3" +version = "0.22.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe033af062937938ded511e5238db3bf8e0c1a30205850d62fb23271b3c96f85" +checksum = "997c842047ffa2d011eb65bf638a3135b2d52bce5b20770fcc6040f1b48c624a" dependencies = [ + "bincode", "miden-air", "miden-core", + "miden-crypto", + "serde", "thiserror", "tracing", - "winter-verifier", ] [[package]] name = "midenc-hir-type" -version = "0.4.3" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d4cfab04baffdda3fb9eafa5f873604059b89a1699aa95e4f1057397a69f0b5" +checksum = "2eb29d7c049fb69373c7e775e3d4411e63e4ee608bc43826282ba62c6ec9f891" dependencies = [ "miden-formatting", + "miden-serde-utils", + "serde", + "serde_repr", "smallvec", "thiserror", ] @@ -1886,7 +2059,7 @@ version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.61.2", + "windows-sys", ] [[package]] @@ -1930,7 +2103,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -1995,9 +2168,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.21.3" +version = "1.21.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" +checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" [[package]] name = "once_cell_polyfill" @@ -2025,91 +2198,400 @@ checksum = "2386b4ebe91c2f7f51082d4cefa145d030e33a1842a96b12e4885cc3c01f7a55" [[package]] name = "owo-colors" -version = "4.2.3" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52" +checksum = "d211803b9b6b570f68772237e415a029d5a50c65d382910b879fb19d3271f94d" [[package]] -name = 
"parking_lot" -version = "0.12.5" +name = "p3-air" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +checksum = "9ebc58ec27a174420348b3f04dba836fa2e5b5fe8df74601087417352757c643" dependencies = [ - "lock_api", - "parking_lot_core", + "p3-field", + "p3-matrix", + "tracing", ] [[package]] -name = "parking_lot_core" -version = "0.9.12" +name = "p3-blake3" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +checksum = "b3cacb38c29fbee71fe3e5c6c0a1073632e46dc3e93fbdc50ab4e4fac137b525" dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-link", + "blake3", + "p3-symmetric", + "p3-util", ] [[package]] -name = "paste" -version = "1.0.15" +name = "p3-challenger" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +checksum = "af9bbcb18fe90271668259aacfc43455e328673c2b5c926cff0663edc8653e4d" +dependencies = [ + "p3-field", + "p3-maybe-rayon", + "p3-monty-31", + "p3-symmetric", + "p3-util", + "tracing", +] [[package]] -name = "petgraph" -version = "0.7.1" +name = "p3-commit" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" +checksum = "14d07b50c6f6d3bc89ed7c54ae0c569fb4caaa58263fd389dc02fb1b0a6378fa" dependencies = [ - "fixedbitset", - "indexmap", + "itertools 0.14.0", + "p3-field", + "p3-matrix", + "p3-util", + "serde", ] [[package]] -name = "phf_shared" -version = "0.11.3" +name = "p3-dft" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +checksum = 
"17e7ba0dc20be075eab3f88f0cb820a0901f86218a1c46134e7c817d41597989" dependencies = [ - "siphasher", + "itertools 0.14.0", + "p3-field", + "p3-matrix", + "p3-maybe-rayon", + "p3-util", + "spin 0.10.0", + "tracing", ] [[package]] -name = "pin-project-lite" -version = "0.2.16" +name = "p3-field" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" +checksum = "9b8533e6c2f4d0cc61fd2ae5299bb83316898e535f47291808d37e4d666ba088" +dependencies = [ + "itertools 0.14.0", + "num-bigint", + "p3-maybe-rayon", + "p3-util", + "paste", + "rand 0.10.0", + "serde", + "tracing", +] [[package]] -name = "pin-utils" -version = "0.1.0" +name = "p3-goldilocks" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +checksum = "d8102a8c85acee1f896c3764bef5fac908e6026dadfc557c185294970cce0746" +dependencies = [ + "num-bigint", + "p3-challenger", + "p3-dft", + "p3-field", + "p3-mds", + "p3-poseidon1", + "p3-poseidon2", + "p3-symmetric", + "p3-util", + "paste", + "rand 0.10.0", + "serde", +] [[package]] -name = "pkcs8" -version = "0.10.2" +name = "p3-keccak" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +checksum = "b65d30dd586d2855906a01c3414c155c2d564f6677d1b51f04186dcac080f757" dependencies = [ - "der", - "spki", + "p3-symmetric", + "p3-util", + "tiny-keccak", ] [[package]] -name = "plotters" -version = "0.3.7" +name = "p3-matrix" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +checksum = "72bb78444459155c2e4711d71abbfef7b04cc2ba1fa83751ccab241b01957095" dependencies = [ - "num-traits", - "plotters-backend", - "plotters-svg", - "wasm-bindgen", - 
"web-sys", + "itertools 0.14.0", + "p3-field", + "p3-maybe-rayon", + "p3-util", + "rand 0.10.0", + "serde", + "tracing", +] + +[[package]] +name = "p3-maybe-rayon" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70a0a54345917f500130a9986fa5ff9ecbc26f0c6313080b35b713e26ddc8053" +dependencies = [ + "rayon", +] + +[[package]] +name = "p3-mds" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cd514bf3e9bf9f1b7db2db96e5bd2972d9963dd62430de1e193d74522ae96a6" +dependencies = [ + "p3-dft", + "p3-field", + "p3-symmetric", + "p3-util", + "rand 0.10.0", +] + +[[package]] +name = "p3-miden-lifted-air" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5c31c65fdc88952d7b301546add9670676e5b878aa0066dd929f107c203b006" +dependencies = [ + "p3-air", + "p3-field", + "p3-matrix", + "p3-util", + "thiserror", +] + +[[package]] +name = "p3-miden-lifted-fri" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab9932f1b0a16609a45cd4ee10a4d35412728bc4b38837c7979d7c85d8dcc9fc" +dependencies = [ + "p3-challenger", + "p3-commit", + "p3-dft", + "p3-field", + "p3-matrix", + "p3-maybe-rayon", + "p3-miden-lmcs", + "p3-miden-transcript", + "p3-util", + "rand 0.10.0", + "thiserror", + "tracing", +] + +[[package]] +name = "p3-miden-lifted-stark" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3956ab7270c3cdd53ca9796d39ae1821984eb977415b0672110f9666bff5d8" +dependencies = [ + "p3-challenger", + "p3-dft", + "p3-field", + "p3-matrix", + "p3-maybe-rayon", + "p3-miden-lifted-air", + "p3-miden-lifted-fri", + "p3-miden-lmcs", + "p3-miden-stateful-hasher", + "p3-miden-transcript", + "p3-util", + "thiserror", + "tracing", +] + +[[package]] +name = "p3-miden-lmcs" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"48c46791c983e772136db3d48f102431457451447abb9087deb6c8ce3c1efc86" +dependencies = [ + "p3-commit", + "p3-field", + "p3-matrix", + "p3-maybe-rayon", + "p3-miden-stateful-hasher", + "p3-miden-transcript", + "p3-symmetric", + "p3-util", + "rand 0.10.0", + "serde", + "thiserror", + "tracing", +] + +[[package]] +name = "p3-miden-stateful-hasher" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec47a9d9615eb3d9d2a59b00d19751d9ad85384b55886827913d680d912eac6a" +dependencies = [ + "p3-field", + "p3-symmetric", +] + +[[package]] +name = "p3-miden-transcript" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c565647487e4a949f67e6f115b0391d6cb82ac8e561165789939bab23d0ae7" +dependencies = [ + "p3-challenger", + "p3-field", + "serde", + "thiserror", +] + +[[package]] +name = "p3-monty-31" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d9340a650f07a6cd42a4e877017ba7b206df87fe50dfc3cf110f01a3c370bd1" +dependencies = [ + "itertools 0.14.0", + "num-bigint", + "p3-dft", + "p3-field", + "p3-matrix", + "p3-maybe-rayon", + "p3-mds", + "p3-poseidon1", + "p3-poseidon2", + "p3-symmetric", + "p3-util", + "paste", + "rand 0.10.0", + "serde", + "spin 0.10.0", + "tracing", +] + +[[package]] +name = "p3-poseidon1" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dd56ae3a51ded1b77f7b1b21d0b157ae82b9d5ca8f2cba347c0b821fe771a79" +dependencies = [ + "p3-field", + "p3-symmetric", + "rand 0.10.0", +] + +[[package]] +name = "p3-poseidon2" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "858aa1c33ec983dfbb8cfc553a213de19d8fde96485e54e6e952b9ac5e70bd4e" +dependencies = [ + "p3-field", + "p3-mds", + "p3-symmetric", + "p3-util", + "rand 0.10.0", +] + +[[package]] +name = "p3-symmetric" +version = "0.5.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a9a3b20bb8104e52d45219a78d80654c8ac6a4781be0eaa3f3e999f5ae4b9b2" +dependencies = [ + "itertools 0.14.0", + "p3-field", + "p3-util", + "serde", +] + +[[package]] +name = "p3-util" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f24495d9cd64693165a9f1b3da0758395ad6d25d2d44dd740bdb34c2bce0c53" +dependencies = [ + "rayon", + "serde", + "transpose", +] + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-link", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "petgraph" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" +dependencies = [ + "fixedbitset", + "indexmap", +] + +[[package]] +name = "phf_shared" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", ] [[package]] @@ -2140,15 +2622,15 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.13.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f89776e4d69bb58bc6993e99ffa1d11f228b839984854c7daeb5d37f87cbe950" +checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" [[package]] name = "portable-atomic-util" -version = "0.2.4" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" +checksum = "091397be61a01d4be58e7841595bd4bfedb15f1cd54977d79b8271e94ed799a3" dependencies = [ "portable-atomic", ] @@ -2191,6 +2673,16 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn 2.0.117", +] + [[package]] name = "primitive-types" version = "0.14.0" @@ -2203,13 +2695,35 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "3.4.0" +version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" +checksum = 
"e67ba7e9b2b56446f1d419b1d807906278ffa1a658a8a5d8a39dcb1f5a78614f" dependencies = [ "toml_edit", ] +[[package]] +name = "proc-macro-error-attr2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "proc-macro-error2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" +dependencies = [ + "proc-macro-error-attr2", + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "proc-macro2" version = "1.0.106" @@ -2221,13 +2735,13 @@ dependencies = [ [[package]] name = "proptest" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" +checksum = "37566cb3fdacef14c0737f9546df7cfeadbfbc9fef10991038bf5015d0c80532" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "num-traits", - "rand", + "rand 0.9.2", "rand_chacha", "rand_xorshift", "regex-syntax", @@ -2242,7 +2756,7 @@ checksum = "fb6dc647500e84a25a85b100e76c85b8ace114c209432dc174f20aac11d4ed6c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -2256,9 +2770,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.44" +version = "1.0.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" dependencies = [ "proc-macro2", ] @@ -2269,6 +2783,21 @@ version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" +[[package]] +name = "r-efi" +version = "6.0.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "rand_core 0.6.4", +] + [[package]] name = "rand" version = "0.9.2" @@ -2279,6 +2808,15 @@ dependencies = [ "rand_core 0.9.5", ] +[[package]] +name = "rand" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc266eb313df6c5c09c1c7b1fbe2510961e5bcd3add930c1e31f7ed9da0feff8" +dependencies = [ + "rand_core 0.10.0", +] + [[package]] name = "rand_chacha" version = "0.9.0" @@ -2307,6 +2845,12 @@ dependencies = [ "getrandom 0.3.4", ] +[[package]] +name = "rand_core" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c8d0fd677905edcbeedbf2edb6494d676f0e98d54d5cf9bda0b061cb8fb8aba" + [[package]] name = "rand_hc" version = "0.3.2" @@ -2360,14 +2904,14 @@ version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", ] [[package]] name = "regex" -version = "1.12.2" +version = "1.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" dependencies = [ "aho-corasick", "memchr", @@ -2377,9 +2921,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" 
dependencies = [ "aho-corasick", "memchr", @@ -2388,9 +2932,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" [[package]] name = "relative-path" @@ -2410,9 +2954,9 @@ dependencies = [ [[package]] name = "rgb" -version = "0.8.52" +version = "0.8.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce" +checksum = "47b34b781b31e5d73e9fbc8689c70551fd1ade9a19e3e28cfec8580a79290cc4" dependencies = [ "bytemuck", ] @@ -2442,16 +2986,43 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.1", - "syn 2.0.114", + "syn 2.0.117", "unicode-ident", ] +[[package]] +name = "ruint" +version = "1.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c141e807189ad38a07276942c6623032d3753c8859c146104ac2e4d68865945a" +dependencies = [ + "proptest", + "rand 0.8.5", + "rand 0.9.2", + "ruint-macro", + "serde_core", + "valuable", + "zeroize", +] + +[[package]] +name = "ruint-macro" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18" + [[package]] name = "rustc-demangle" version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b50b8869d9fc858ce7266cce0194bd74df58b9d0e3f6df3a9fc8eb470d95c09d" +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + [[package]] name = "rustc_version" version = "0.2.3" @@ -2472,28 +3043,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.44" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" -dependencies = [ - "bitflags 2.10.0", - "errno", - "libc", - "linux-raw-sys 0.4.15", - "windows-sys 0.59.0", -] - -[[package]] -name = "rustix" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "errno", "libc", - "linux-raw-sys 0.11.0", - "windows-sys 0.61.2", + "linux-raw-sys", + "windows-sys", ] [[package]] @@ -2589,7 +3147,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -2606,6 +3164,17 @@ dependencies = [ "zmij", ] +[[package]] +name = "serde_repr" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "serde_spanned" version = "1.0.4" @@ -2669,15 +3238,18 @@ checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" [[package]] name = "slab" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" [[package]] name = "smallvec" version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" +dependencies = [ + "serde", +] [[package]] name = "smawk" @@ -2731,6 +3303,12 @@ version = "0.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9091b6114800a5f2141aee1d1b9d6ca3592ac062dc5decb3764ec5895a47b4eb" +[[package]] +name = "strength_reduce" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe895eb47f22e2ddd4dabc02bce419d2e643c8e3b585c78158b349195bc24d82" + [[package]] name = "string_cache" version = "0.8.9" @@ -2758,32 +3336,11 @@ version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" -[[package]] -name = "supports-color" -version = "3.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6" -dependencies = [ - "is_ci", -] - -[[package]] -name = "supports-hyperlinks" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e396b6523b11ccb83120b115a0b7366de372751aa6edf19844dfb13a6af97e91" - -[[package]] -name = "supports-unicode" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" - [[package]] name = "symbolic-common" -version = "12.17.1" +version = "12.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520cf51c674f8b93d533f80832babe413214bb766b6d7cb74ee99ad2971f8467" +checksum = "751a2823d606b5d0a7616499e4130a516ebd01a44f39811be2b9600936509c23" dependencies = [ "debugid", "memmap2", @@ -2793,9 +3350,9 @@ dependencies = [ [[package]] name = "symbolic-demangle" -version = "12.17.1" +version = "12.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f0de2ee0ffa2641e17ba715ad51d48b9259778176517979cb38b6aa86fa7425" +checksum = "79b237cfbe320601dd24b4ac817a5b68bb28f5508e33f08d42be0682cadc8ac9" dependencies = [ "rustc-demangle", "symbolic-common", @@ -2814,15 +3371,27 @@ 
dependencies = [ [[package]] name = "syn" -version = "2.0.114" +version = "2.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] +[[package]] +name = "syn-solidity" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53f425ae0b12e2f5ae65542e00898d500d4d318b4baf09f40fd0d410454e9947" +dependencies = [ + "paste", + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "target-triple" version = "1.0.0" @@ -2831,15 +3400,15 @@ checksum = "591ef38edfb78ca4771ee32cf494cb8771944bee237a9b91fc9c1424ac4b777b" [[package]] name = "tempfile" -version = "3.24.0" +version = "3.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" +checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd" dependencies = [ "fastrand", - "getrandom 0.3.4", + "getrandom 0.4.2", "once_cell", - "rustix 1.1.3", - "windows-sys 0.61.2", + "rustix", + "windows-sys", ] [[package]] @@ -2848,7 +3417,7 @@ version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8c27177b12a6399ffc08b98f76f7c9a1f4fe9fc967c784c5a071fa8d93cf7e1" dependencies = [ - "windows-sys 0.61.2", + "windows-sys", ] [[package]] @@ -2860,16 +3429,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "terminal_size" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" -dependencies = [ - "rustix 0.38.44", - "windows-sys 0.48.0", -] - [[package]] name = "textwrap" version = "0.16.2" @@ -2898,7 +3457,7 @@ checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -2910,6 +3469,15 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + [[package]] name = "tinytemplate" version = "1.2.1" @@ -2922,9 +3490,9 @@ dependencies = [ [[package]] name = "tokio" -version = "1.49.0" +version = "1.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" +checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" dependencies = [ "pin-project-lite", "tokio-macros", @@ -2932,13 +3500,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.6.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -2965,9 +3533,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.9.11+spec-1.1.0" +version = "1.0.7+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3afc9a848309fe1aaffaed6e1546a7a14de1f935dc9d89d32afd9a44bab7c46" +checksum = "dd28d57d8a6f6e458bc0b8784f8fdcc4b99a437936056fa122cb234f18656a96" dependencies = [ "indexmap", "serde_core", @@ -2980,18 +3548,18 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.7.5+spec-1.1.0" +version = "1.0.1+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" +checksum = "9b320e741db58cac564e26c607d3cc1fdc4a88fd36c879568c07856ed83ff3e9" dependencies 
= [ "serde_core", ] [[package]] name = "toml_edit" -version = "0.23.10+spec-1.0.0" +version = "0.25.5+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" +checksum = "8ca1a40644a28bce036923f6a431df0b34236949d111cc07cb6dca830c9ef2e1" dependencies = [ "indexmap", "toml_datetime", @@ -3001,18 +3569,18 @@ dependencies = [ [[package]] name = "toml_parser" -version = "1.0.6+spec-1.1.0" +version = "1.0.10+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" +checksum = "7df25b4befd31c4816df190124375d5a20c6b6921e2cad937316de3fccd63420" dependencies = [ "winnow", ] [[package]] name = "toml_writer" -version = "1.0.6+spec-1.1.0" +version = "1.0.7+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" +checksum = "f17aaa1c6e3dc22b1da4b6bba97d066e354c7945cac2f7852d4e4e7ca7a6b56d" [[package]] name = "tracing" @@ -3033,7 +3601,7 @@ checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -3080,9 +3648,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.22" +version = "0.3.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" +checksum = "cb7f578e5945fb242538965c2d0b04418d38ec25c79d160cd279bf0731c8d319" dependencies = [ "matchers", "nu-ansi-term", @@ -3096,11 +3664,21 @@ dependencies = [ "tracing-log", ] +[[package]] +name = "transpose" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad61aed86bc3faea4300c7aee358b4c6d0c8d6ccc36524c96e4c92ccf26e77e" +dependencies = [ + "num-integer", + 
"strength_reduce", +] + [[package]] name = "trybuild" -version = "1.0.114" +version = "1.0.116" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e17e807bff86d2a06b52bca4276746584a78375055b6e45843925ce2802b335" +checksum = "47c635f0191bd3a2941013e5062667100969f8c4e9cd787c14f977265d73616e" dependencies = [ "dissimilar", "glob", @@ -3138,9 +3716,9 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicode-ident" -version = "1.0.22" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" [[package]] name = "unicode-linebreak" @@ -3148,6 +3726,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + [[package]] name = "unicode-width" version = "0.1.14" @@ -3184,9 +3768,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.20.0" +version = "1.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee48d38b119b0cd71fe4141b30f5ba9c7c5d9f4e7a3a8b4a674e4b6ef789976f" +checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" dependencies = [ "js-sys", "wasm-bindgen", @@ -3238,11 +3822,20 @@ dependencies = [ "wit-bindgen", ] +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + 
[[package]] name = "wasm-bindgen" -version = "0.2.108" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" +checksum = "6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e" dependencies = [ "cfg-if", "once_cell", @@ -3253,9 +3846,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.108" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" +checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3263,31 +3856,65 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.108" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" +checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3" dependencies = [ "bumpalo", "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.108" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" +checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16" dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.11.0", + "hashbrown 0.15.5", + "indexmap", + "semver 1.0.27", +] + [[package]] name = "web-sys" -version = "0.3.85" +version = "0.3.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" +checksum = "854ba17bb104abfb26ba36da9729addc7ce7f06f5c0f90f3c391f8461cca21f9" dependencies = [ "js-sys", "wasm-bindgen", @@ -3315,7 +3942,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.61.2", + "windows-sys", ] [[package]] @@ -3339,24 +3966,6 @@ dependencies = [ "windows-link", ] -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - [[package]] name = "windows-sys" version = "0.61.2" @@ -3366,255 +3975,103 @@ dependencies = [ "windows-link", ] -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 
0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - [[package]] name = "winnow" -version = "0.7.14" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +checksum = "a90e88e4667264a994d34e6d1ab2d26d398dcdca8b7f52bec8668957517fc7d8" dependencies = [ "memchr", ] [[package]] -name = "winter-air" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef01227f23c7c331710f43b877a8333f5f8d539631eea763600f1a74bf018c7c" -dependencies = [ - "libm", - "winter-crypto", - "winter-fri", - "winter-math", - "winter-utils", -] - -[[package]] -name = "winter-crypto" -version = "0.13.1" +name = "wit-bindgen" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cdb247bc142438798edb04067ab72a22cf815f57abbd7b78a6fa986fc101db8" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" dependencies = [ - "blake3", - "sha3", - "winter-math", - "winter-utils", + "wit-bindgen-rust-macro", ] [[package]] -name = "winter-fri" -version = "0.13.1" +name = "wit-bindgen-core" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd592b943f9d65545683868aaf1b601eb66e52bfd67175347362efff09101d3a" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" dependencies = [ - "winter-crypto", - "winter-math", - "winter-utils", + "anyhow", + "heck", + "wit-parser", ] [[package]] -name = "winter-math" -version = "0.13.1" +name = "wit-bindgen-rust" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aecfb48ee6a8b4746392c8ff31e33e62df8528a3b5628c5af27b92b14aef1ea" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" dependencies = [ - "winter-utils", + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn 2.0.117", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", ] [[package]] -name = "winter-maybe-async" -version = "0.13.1" +name = "wit-bindgen-rust-macro" +version = "0.51.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d31a19dae58475d019850e25b0170e94b16d382fbf6afee9c0e80fdc935e73e" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", "quote", - "syn 2.0.114", -] - -[[package]] -name = "winter-prover" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84cc631ed56cd39b78ef932c1ec4060cc6a44d114474291216c32f56655b3048" -dependencies = [ - "tracing", - "winter-air", - "winter-crypto", - "winter-fri", - "winter-math", - "winter-maybe-async", - "winter-utils", -] - -[[package]] -name = "winter-rand-utils" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4ff3b651754a7bd216f959764d0a5ab6f4b551c9a3a08fb9ccecbed594b614a" -dependencies = [ - "rand", - "winter-utils", -] - -[[package]] -name = "winter-utils" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9951263ef5317740cd0f49e618db00c72fabb70b75756ea26c4d5efe462c04dd" -dependencies = [ - "rayon", + "syn 2.0.117", + "wit-bindgen-core", + "wit-bindgen-rust", ] [[package]] -name = "winter-verifier" -version = "0.13.1" +name = "wit-component" +version = "0.244.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0425ea81f8f703a1021810216da12003175c7974a584660856224df04b2e2fdb" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" dependencies = [ - "winter-air", - "winter-crypto", - "winter-fri", - "winter-math", - "winter-utils", + "anyhow", + "bitflags 2.11.0", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", ] [[package]] -name = "winterfell" -version = "0.13.1" +name = "wit-parser" +version = "0.244.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"43f824ddd5aec8ca6a54307f20c115485a8a919ea94dd26d496d856ca6185f4f" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" dependencies = [ - "winter-air", - "winter-prover", - "winter-verifier", + "anyhow", + "id-arena", + "indexmap", + "log", + "semver 1.0.27", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", ] -[[package]] -name = "wit-bindgen" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" - [[package]] name = "x25519-dalek" version = "2.0.1" @@ -3627,22 +4084,22 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.36" +version = "0.8.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dafd85c832c1b68bbb4ec0c72c7f6f4fc5179627d2bc7c26b30e4c0cc11e76cc" +checksum = "f2578b716f8a7a858b7f02d5bd870c14bf4ddbbcf3a4c05414ba6503640505e3" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.36" +version = "0.8.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cb7e4e8436d9db52fbd6625dbf2f45243ab84994a72882ec8227b99e72b439a" +checksum = "7e6cc098ea4d3bd6246687de65af3f920c430e236bee1e3bf2e441463f08a02f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -3653,6 +4110,6 @@ checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" [[package]] name = "zmij" -version = "1.0.17" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02aae0f83f69aafc94776e879363e9771d7ecbffe2c7fbb6c14c5e00dfe88439" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/Cargo.toml b/Cargo.toml index be5fec631b..41d8e019b1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,7 @@ homepage = "https://miden.xyz" license = "MIT" repository = "https://github.com/0xMiden/protocol" 
rust-version = "1.90" -version = "0.14.0" +version = "0.14.0-beta.2" [profile.release] codegen-units = 1 @@ -31,40 +31,35 @@ lto = true inherits = "dev" opt-level = 1 -# Avoid running the expensive debug assertion in winter-prover -# https://github.com/facebook/winterfell/blob/cd32dce2fd4986c94516113568eefd938fafe31c/prover/src/lib.rs#L355C1-L356 -[profile.test-dev.package.winter-prover] -debug-assertions = false - [profile.bench] codegen-units = 1 lto = true [workspace.dependencies] # Workspace crates -miden-agglayer = { default-features = false, path = "crates/miden-agglayer", version = "0.14" } -miden-block-prover = { default-features = false, path = "crates/miden-block-prover", version = "0.14" } -miden-protocol = { default-features = false, path = "crates/miden-protocol", version = "0.14" } -miden-protocol-macros = { default-features = false, path = "crates/miden-protocol-macros", version = "0.14" } -miden-standards = { default-features = false, path = "crates/miden-standards", version = "0.14" } -miden-testing = { default-features = false, path = "crates/miden-testing", version = "0.14" } -miden-tx = { default-features = false, path = "crates/miden-tx", version = "0.14" } -miden-tx-batch-prover = { default-features = false, path = "crates/miden-tx-batch-prover", version = "0.14" } +miden-agglayer = { default-features = false, path = "crates/miden-agglayer", version = "=0.14.0-beta.2" } +miden-block-prover = { default-features = false, path = "crates/miden-block-prover", version = "=0.14.0-beta.2" } +miden-protocol = { default-features = false, path = "crates/miden-protocol", version = "=0.14.0-beta.2" } +miden-protocol-macros = { default-features = false, path = "crates/miden-protocol-macros", version = "=0.14.0-beta.2" } +miden-standards = { default-features = false, path = "crates/miden-standards", version = "=0.14.0-beta.2" } +miden-testing = { default-features = false, path = "crates/miden-testing", version = "=0.14.0-beta.2" } +miden-tx = { 
default-features = false, path = "crates/miden-tx", version = "=0.14.0-beta.2" } +miden-tx-batch-prover = { default-features = false, path = "crates/miden-tx-batch-prover", version = "=0.14.0-beta.2" } # Miden dependencies -miden-air = { default-features = false, version = "0.20" } -miden-assembly = { default-features = false, version = "0.20" } -miden-assembly-syntax = { default-features = false, version = "0.20" } -miden-core = { default-features = false, version = "0.20" } -miden-core-lib = { default-features = false, version = "0.20" } -miden-crypto = { default-features = false, version = "0.19" } -miden-mast-package = { default-features = false, version = "0.20" } -miden-processor = { default-features = false, version = "0.20" } -miden-prover = { default-features = false, version = "0.20" } -miden-utils-sync = { default-features = false, version = "0.20" } -miden-verifier = { default-features = false, version = "0.20" } +miden-assembly = { default-features = false, version = "0.22.0" } +miden-assembly-syntax = { default-features = false, version = "0.22.0" } +miden-core = { default-features = false, version = "0.22.0" } +miden-core-lib = { default-features = false, version = "0.22.0" } +miden-crypto = { default-features = false, version = "0.23" } +miden-mast-package = { default-features = false, version = "0.22.0" } +miden-processor = { default-features = false, version = "0.22.0" } +miden-prover = { default-features = false, version = "0.22.0" } +miden-utils-sync = { default-features = false, version = "0.22.0" } +miden-verifier = { default-features = false, version = "0.22.0" } # External dependencies +alloy-sol-types = { default-features = false, version = "1.5" } anyhow = { default-features = false, features = ["backtrace", "std"], version = "1.0" } assert_matches = { default-features = false, version = "1.5" } fs-err = { default-features = false, version = "3" } diff --git a/Makefile b/Makefile index 1eb0f212ec..019bbd2bf9 100644 --- a/Makefile +++ 
b/Makefile @@ -149,6 +149,8 @@ generate-solidity-test-vectors: ## Regenerate Solidity MMR test vectors using Fo cd crates/miden-agglayer/solidity-compat && forge test -vv --match-test test_generateVectors cd crates/miden-agglayer/solidity-compat && forge test -vv --match-test test_generateCanonicalZeros cd crates/miden-agglayer/solidity-compat && forge test -vv --match-test test_generateVerificationProofData + cd crates/miden-agglayer/solidity-compat && forge test -vv --match-test test_generateLeafValueVectors + cd crates/miden-agglayer/solidity-compat && forge test -vv --match-test test_generateClaimAssetVectors # --- benchmarking -------------------------------------------------------------------------------- diff --git a/README.md b/README.md index 8fd2a2178a..0f5a0fbe68 100644 --- a/README.md +++ b/README.md @@ -28,15 +28,15 @@ Miden is currently on release v0.13. This is an early version of the protocol an ### Feature highlights - **Private accounts**. The Miden Operator tracks only commitments to account data in the public database. The users are responsible for keeping track of the state of their accounts. -- **Public accounts**. With public accounts users are be able to store the entire state of their accounts on-chain, thus, eliminating the need to keep track of account states locally (albeit by sacrificing privacy and at a higher cost). +- **Public accounts**. With public accounts, users are able to store the entire state of their accounts on-chain, thus, eliminating the need to keep track of account states locally (albeit by sacrificing privacy and at a higher cost). - **Private notes**. Like with private accounts, the Miden Operator tracks only commitments to notes in the public database. Users need to communicate note details to each other via side channels. -- **Public notes**. With public notes, the users are be able to store all note details on-chain, thus, eliminating the need to communicate note details via side-channels. +- **Public notes**. 
With public notes, users are able to store all note details on-chain, thus, eliminating the need to communicate note details via side-channels. - **Local transactions**. Users can execute and prove transactions locally on their devices. The Miden Operator verifies the proofs and if the proofs are valid, updates the state of the rollup accordingly. - **Standard account**. Users can create accounts using a small number of standard account interfaces (e.g., basic wallet). In the future, the set of standard smart contracts will be expanded. -- **Standard notes**. Can create notes using standardized note scripts such as Pay-to-ID (`P2ID`) and atomic swap (`SWAP`). In the future, the set of standardized notes will be expanded. -- **Delegated note inclusion proofs**. By delegating note inclusion proofs, users can create chains of dependent notes which are included into a block as a single batch. +- **Standard notes**. Users can create notes using standardized note scripts such as Pay-to-ID (`P2ID`) and atomic swap (`SWAP`). In the future, the set of standardized notes will be expanded. +- **Delegated note inclusion proofs**. By delegating note inclusion proofs, users can create chains of dependent transactions which are included into a block as a single batch. - **Transaction recency conditions**. Users are able to specify how close to the chain tip their transactions are to be executed. This enables things like rate limiting and oracles. -- **Network transactions**. Users will be able to create notes intended for network execution. Such notes will be included into transactions executed and proven by the Miden operator. +- **Network transactions**. Users are able to create notes intended for network execution. Such notes are included into transactions executed and proven by the Miden operator. ### Planned features @@ -44,12 +44,12 @@ Miden is currently on release v0.13. 
This is an early version of the protocol an ## Project structure -| Crate | Description | -| ------------------------------- | ------------------------------------------------------------------------------- | -| [miden-protocol](crates/miden-protocol) | Contains core components defining the Miden protocol, including the transaction kernel. | -| [miden-standards](crates/miden-standards) | Contains the code of Miden's standardized smart contracts. | -| [miden-tx](crates/miden-tx) | Contains tool for creating, executing, and proving Miden rollup transaction. | -| [bench-tx](bin/bench-tx) | Contains transaction execution and proving benchmarks. | +| Crate | Description | +| ----------------------------------------- | --------------------------------------------------------------------------------------- | +| [miden-protocol](crates/miden-protocol) | Contains core components defining the Miden protocol, including the transaction kernel. | +| [miden-standards](crates/miden-standards) | Contains the code of Miden's standardized smart contracts. | +| [miden-tx](crates/miden-tx) | Contains tools for creating, executing, and proving Miden rollup transactions. | +| [bench-tx](bin/bench-tx) | Contains transaction execution and proving benchmarks. 
| ## Make commands diff --git a/bin/bench-note-checker/src/lib.rs b/bin/bench-note-checker/src/lib.rs index cb08ab832e..bb2c4cd546 100644 --- a/bin/bench-note-checker/src/lib.rs +++ b/bin/bench-note-checker/src/lib.rs @@ -1,6 +1,6 @@ use miden_protocol::account::AccountId; use miden_protocol::asset::FungibleAsset; -use miden_protocol::crypto::rand::RpoRandomCoin; +use miden_protocol::crypto::rand::RandomCoin; use miden_protocol::note::{Note, NoteType}; use miden_protocol::testing::account_id::{ ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE, @@ -83,7 +83,7 @@ pub fn setup_mixed_notes_benchmark(config: MixedNotesConfig) -> anyhow::Result Result { let mut builder = MockChain::builder(); let fungible_asset = FungibleAsset::mock(150); let account = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, [fungible_asset], )?; @@ -41,9 +42,10 @@ pub fn tx_create_single_p2id_note() -> Result { # => [note_idx] # move the asset to the note - push.{asset} + dup + push.{ASSET_VALUE} + push.{ASSET_KEY} call.::miden::standards::wallets::basic::move_asset_to_note - dropw # => [note_idx] # truncate the stack @@ -53,7 +55,8 @@ pub fn tx_create_single_p2id_note() -> Result { RECIPIENT = output_note.recipient().digest(), note_type = NoteType::Public as u8, tag = output_note.metadata().tag(), - asset = Word::from(fungible_asset), + ASSET_KEY = fungible_asset.to_key_word(), + ASSET_VALUE = fungible_asset.to_value_word(), ); let tx_script = CodeBuilder::default().compile_tx_script(tx_note_creation_script)?; @@ -61,7 +64,7 @@ pub fn tx_create_single_p2id_note() -> Result { // construct the transaction context mock_chain .build_tx_context(account.id(), &[], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) .tx_script(tx_script) .disable_debug_mode() .build() @@ -76,8 +79,9 @@ pub fn tx_consume_single_p2id_note() -> Result { let mut builder = MockChain::builder(); // Create target account - let target_account = - builder.create_new_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let target_account = builder.create_new_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; // Create the note let note = builder @@ -103,8 +107,9 @@ pub fn tx_consume_single_p2id_note() -> Result { pub fn tx_consume_two_p2id_notes() -> Result { let mut builder = MockChain::builder(); - let account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let fungible_asset_1: Asset = FungibleAsset::mock(100); let fungible_asset_2: Asset = FungibleAsset::mock(23); diff --git a/bin/bench-transaction/src/time_counting_benchmarks/prove.rs b/bin/bench-transaction/src/time_counting_benchmarks/prove.rs index 5dafb4604d..edad46c21e 100644 --- a/bin/bench-transaction/src/time_counting_benchmarks/prove.rs +++ b/bin/bench-transaction/src/time_counting_benchmarks/prove.rs @@ -89,12 +89,15 @@ fn core_benchmarks(c: &mut Criterion) { }, |tx_context| async move { // benchmark the transaction execution and proving - black_box(prove_transaction( - tx_context - .execute() - .await - .expect("execution of the single P2ID note consumption tx failed"), - )) + black_box( + prove_transaction( + tx_context + .execute() + .await + .expect("execution of the single P2ID note consumption tx failed"), + ) + .await, + ) }, BatchSize::SmallInput, ); @@ -110,12 +113,15 @@ fn core_benchmarks(c: &mut Criterion) { }, |tx_context| async move { // benchmark the transaction execution and proving 
- black_box(prove_transaction( - tx_context - .execute() - .await - .expect("execution of the two P2ID note consumption tx failed"), - )) + black_box( + prove_transaction( + tx_context + .execute() + .await + .expect("execution of the two P2ID note consumption tx failed"), + ) + .await, + ) }, BatchSize::SmallInput, ); @@ -124,10 +130,10 @@ fn core_benchmarks(c: &mut Criterion) { execute_and_prove_group.finish(); } -fn prove_transaction(executed_transaction: ExecutedTransaction) -> Result<()> { +async fn prove_transaction(executed_transaction: ExecutedTransaction) -> Result<()> { let executed_transaction_id = executed_transaction.id(); let proven_transaction: ProvenTransaction = - LocalTransactionProver::default().prove(executed_transaction)?; + LocalTransactionProver::default().prove(executed_transaction).await?; assert_eq!(proven_transaction.id(), executed_transaction_id); Ok(()) diff --git a/crates/miden-agglayer/Cargo.toml b/crates/miden-agglayer/Cargo.toml index 7541b7ea8d..efe8b903c1 100644 --- a/crates/miden-agglayer/Cargo.toml +++ b/crates/miden-agglayer/Cargo.toml @@ -23,12 +23,23 @@ testing = ["miden-protocol/testing"] # Miden dependencies miden-assembly = { workspace = true } miden-core = { workspace = true } +miden-core-lib = { workspace = true } miden-protocol = { workspace = true } miden-standards = { workspace = true } miden-utils-sync = { workspace = true } +# Third-party dependencies +alloy-sol-types = { workspace = true } +primitive-types = { workspace = true } +thiserror = { workspace = true } + +# Crypto +miden-crypto = { workspace = true } + [dev-dependencies] miden-agglayer = { features = ["testing"], path = "." 
} +serde = { features = ["derive"], workspace = true } +serde_json = { version = "1.0" } [build-dependencies] fs-err = { workspace = true } diff --git a/crates/miden-agglayer/SPEC.md b/crates/miden-agglayer/SPEC.md new file mode 100644 index 0000000000..924330d031 --- /dev/null +++ b/crates/miden-agglayer/SPEC.md @@ -0,0 +1,691 @@ +# AggLayer <> Miden Bridge Integration Specification + +**Scope:** Implementation-accurate specification of the AggLayer bridge integration on +Miden, covering contracts, note flows, storage, and encoding semantics. + +**Baseline:** Branch `agglayer` (to-be-tagged `v0.14-alpha`). All statements in sections 1-3 describe +current implementation behaviour and are cross-checked against the test suite in +`crates/miden-testing/tests/agglayer/`. Planned changes that diverge from the current +implementation are called out inline with `TODO (Future)` markers. + +**Conventions:** + +- *Word* = 4 field elements (felts), each < p (Goldilocks prime 2^64 - 2^32 + 1). +- *Felt* = a single Goldilocks field element. +- Word values in this spec use **element-index notation** matching Rust's + `Word::new([e0, e1, e2, e3])`. MASM doc comments use **stack notation** (top-first), + which reverses the order: stack `[a, b, c, d]` = Word `[d, c, b, a]`. +- Procedure input/output signatures use **stack notation** (top-first), matching the + MASM doc comments. +- `TODO (Future)` marks non-implemented design points. + +--- + +## 1. Entities and Trust Model + +| Entity | Description | Account type | +|--------|-------------|--------------| +| **User** | End-user Miden account that holds assets and initiates bridge-out deposits, or receives assets from a bridge-in claim. | Any account with `basic_wallet` component | +| **AggLayer Bridge** | Onchain bridge account that manages the Local Exit Tree (LET), faucet registry, and GER state. Consumes B2AGG, CONFIG, and UPDATE_GER notes. 
| Network-mode account with a single `bridge` component | +| **AggLayer Faucet** | Fungible faucet that represents a single bridged token. Mints on bridge-in claims, burns on bridge-out. Each foreign token has its own faucet instance. | `FungibleFaucet`, network-mode, with `agglayer_faucet` component | +| **Integration Service** (offchain) | Observes L1 events (deposits, GER updates) and creates UPDATE_GER and CLAIM notes on Miden. Trusted to provide correct proofs and data. | Not an onchain entity; creates notes targeting bridge/faucet | +| **Bridge Operator** (offchain) | Deploys bridge and faucet accounts. Creates CONFIG_AGG_BRIDGE notes to register faucets. Must use the bridge admin account. | Not an onchain entity; creates config notes | + +### Current permissions + +| Note type | Issuer (sender check) | Consumer (consuming-account check) | +|-----------|----------------------|-----------------------------------| +| B2AGG (bridge-out) | Any user -- not restricted | Bridge account -- **enforced** via `NetworkAccountTarget` attachment | +| B2AGG (reclaim) | Any user -- not restricted | Original sender only -- **enforced**: script checks `sender == consuming account` | +| CONFIG_AGG_BRIDGE | Bridge admin only -- **enforced** by `bridge_config::register_faucet` procedure | Bridge account -- **enforced** via `NetworkAccountTarget` attachment | +| UPDATE_GER | GER manager only -- **enforced** by `bridge_config::update_ger` procedure | Bridge account -- **enforced** via `NetworkAccountTarget` attachment | +| CLAIM | Anyone -- not restricted | Target faucet only -- **enforced** via `NetworkAccountTarget` attachment | + +--- + +## 2. 
Contracts and Public Interfaces + +### 2.1 Bridge Account Component + +The bridge account has a single unified `bridge` component (`components/bridge.masm`), +which is a thin wrapper that re-exports procedures from the `agglayer` library modules: + +- `bridge_config::register_faucet` +- `bridge_config::update_ger` +- `bridge_out::bridge_out` + +The underlying library code lives in `asm/agglayer/bridge/` with supporting modules in +`asm/agglayer/common/`. + +#### `bridge_out::bridge_out` + +| | | +|-|-| +| **Invocation** | `call` | +| **Inputs** | `[ASSET, dest_network_id, dest_addr(5), pad(4)]` | +| **Outputs** | `[]` | +| **Context** | Consuming a `B2AGG` note on the bridge account | +| **Panics** | Faucet not in registry; FPI to faucet fails | + +Bridges an asset out of Miden into the AggLayer: + +1. Validates the asset's faucet is registered in the faucet registry. +2. FPIs to `agglayer_faucet::asset_to_origin_asset` on the faucet account to obtain the scaled U256 amount, origin token address, and origin network. +3. Builds a leaf-data structure in memory (leaf type, origin network, origin token address, destination network, destination address, amount, metadata hash). +4. Computes the Keccak-256 leaf value and appends it to the Local Exit Tree (MMR frontier). +5. Creates a public `BURN` note targeting the faucet via a `NetworkAccountTarget` attachment. + +#### `bridge_config::register_faucet` + +| | | +|-|-| +| **Invocation** | `call` | +| **Inputs** | `[faucet_id_prefix, faucet_id_suffix, pad(14)]` | +| **Outputs** | `[pad(16)]` | +| **Context** | Consuming a `CONFIG_AGG_BRIDGE` note on the bridge account | +| **Panics** | Note sender is not the bridge admin | + +Asserts the note sender matches the bridge admin stored in +`agglayer::bridge::admin_account_id`, then writes +`[0, 0, faucet_id_suffix, faucet_id_prefix] -> [1, 0, 0, 0]` into the +`faucet_registry_map` map slot. 
+ +#### `bridge_config::update_ger` + +| | | +|-|-| +| **Invocation** | `call` | +| **Inputs** | `[GER_LOWER(4), GER_UPPER(4), pad(8)]` | +| **Outputs** | `[pad(16)]` | +| **Context** | Consuming an `UPDATE_GER` note on the bridge account | +| **Panics** | Note sender is not the GER manager | + +Asserts the note sender matches the GER manager stored in +`agglayer::bridge::ger_manager_account_id`, then computes +`KEY = rpo256::merge(GER_UPPER, GER_LOWER)` and stores +`KEY -> [1, 0, 0, 0]` in the `ger_map` map slot. This marks the GER as "known". + +#### `bridge_in::verify_leaf_bridge` +TODO ([#2624](https://github.com/0xMiden/protocol/issues/2624)): document new CLAIM note flow. + +| | | +|-|-| +| **Invocation** | `call` (invoked via FPI from the faucet) | +| **Inputs** | `[LEAF_DATA_KEY, PROOF_DATA_KEY, pad(8)]` on the operand stack; proof data and leaf data in the advice map | +| **Outputs** | `[pad(16)]` | +| **Context** | FPI target -- called by the faucet during `CLAIM` consumption | +| **Panics** | GER not known; global index not mainnet; rollup index non-zero; Merkle proof verification failed | + +Verifies a bridge-in claim: + +1. Retrieves leaf data from the advice map, computes the Keccak-256 leaf value. +2. Retrieves proof data from the advice map: SMT proofs, global index, exit roots. +3. Computes the GER from `mainnet_exit_root` and `rollup_exit_root`, asserts it is in + the known GER set. +4. Extracts the leaf index from the global index (must be mainnet, rollup index = 0). (TODO (Future): rollup indices are not processed yet [#2394](https://github.com/0xMiden/protocol/issues/2394)). +5. Verifies the Merkle proof: leaf value at `leaf_index` against `mainnet_exit_root`. 
+ +#### Bridge Account Storage + +| Slot name | Slot type | Key encoding | Value encoding | Purpose | +|-----------|-----------|-------------|----------------|---------| +| `agglayer::bridge::ger_map` | Map | `rpo256::merge(GER_UPPER, GER_LOWER)` | `[1, 0, 0, 0]` if known; `[0, 0, 0, 0]` if absent | Known Global Exit Root set | +| `agglayer::bridge::let_frontier` | Map | `[h, 0, 0, 0]` and `[h, 1, 0, 0]` (for h = 0..31) | Per index h: two keys yield one double-word (2 words = 8 felts, a Keccak-256 digest). Absent keys return zeros. | Local Exit Tree MMR frontier | +| `agglayer::bridge::let_root_lo` | Value | -- | `[root_0, root_1, root_2, root_3]` | LET root low word (Keccak-256 lower 16 bytes) | +| `agglayer::bridge::let_root_hi` | Value | -- | `[root_4, root_5, root_6, root_7]` | LET root high word (Keccak-256 upper 16 bytes) | +| `agglayer::bridge::let_num_leaves` | Value | -- | `[count, 0, 0, 0]` | Number of leaves appended to the LET | +| `agglayer::bridge::faucet_registry_map` | Map | `[0, 0, faucet_id_suffix, faucet_id_prefix]` | `[1, 0, 0, 0]` if registered; `[0, 0, 0, 0]` if absent | Registered faucet lookup | +| `agglayer::bridge::admin_account_id` | Value | -- | `[0, 0, admin_suffix, admin_prefix]` | Bridge admin account ID for CONFIG note authorization | +| `agglayer::bridge::ger_manager_account_id` | Value | -- | `[0, 0, mgr_suffix, mgr_prefix]` | GER manager account ID for UPDATE_GER note authorization | + +Initial state: all map slots empty, all value slots `[0, 0, 0, 0]` except +`admin_account_id` and `ger_manager_account_id` which are set at account creation time. 
+ +### 2.2 Faucet Account Component + +The faucet account has the `agglayer_faucet` component (`components/faucet.masm`), +which is a thin wrapper that re-exports procedures from the `agglayer` library: + +- `faucet::claim` +- `faucet::asset_to_origin_asset` +- `faucet::burn` + +The underlying library code lives in `asm/agglayer/faucet/mod.masm` with supporting +modules in `asm/agglayer/common/`. + +#### `agglayer_faucet::claim` + +| | | +|-|-| +| **Invocation** | `call` | +| **Inputs** | `[PROOF_DATA_KEY, LEAF_DATA_KEY, faucet_mint_amount, pad(7)]` | +| **Outputs** | `[pad(16)]` | +| **Context** | Consuming a `CLAIM` note on the faucet account | +| **Panics** | Invalid proof; bridge ID not set; FPI to bridge fails; faucet distribution fails | + +Processes a bridge-in claim: + +1. Loads and verifies two advice map entries (proof data, leaf data) into memory. +2. Extracts the destination account ID from the leaf data's destination address (via `eth_address::to_account_id`). +3. Extracts the raw U256 claim amount from the leaf data. +4. FPI to `bridge_in::verify_leaf_bridge` on the bridge account to validate the proof. +5. Verifies `faucet_mint_amount` (passed on the stack from the CLAIM note script) against the U256 amount and scale factor using `asset_conversion::verify_u256_to_native_amount_conversion`. This ensures the amount conversion was performed correctly off-chain, without requiring expensive U256 division inside the VM. +6. Mints the asset via `faucets::distribute` and creates a public P2ID output note for the recipient. The P2ID serial number is derived deterministically from `PROOF_DATA_KEY` (RPO256 hash of the proof data), and the note tag is computed at runtime from the destination account's prefix. 
+ +#### `agglayer_faucet::asset_to_origin_asset` + +| | | +|-|-| +| **Invocation** | `call` (invoked via FPI from the bridge) | +| **Inputs** | `[amount, pad(15)]` | +| **Outputs** | `[AMOUNT_U256_0(4), AMOUNT_U256_1(4), addr(5), origin_network, pad(2)]` | +| **Context** | FPI target -- called by the bridge during bridge-out | +| **Panics** | Scale exceeds 18 | + +Converts a Miden-native asset amount to the origin chain's U256 representation: + +1. Reads the scale from storage, calls `asset_conversion::scale_native_amount_to_u256`. +2. Returns the origin token address and origin network from storage. + +#### `agglayer_faucet::burn` + +This is a re-export of `miden::standards::faucets::basic_fungible::burn`. It burns the fungible asset from the active note, decreasing the faucet's token supply. + +| | | +|-|-| +| **Invocation** | `call` | +| **Inputs** | `[pad(16)]` | +| **Outputs** | `[pad(16)]` | +| **Context** | Consuming a `BURN` note on the faucet account | +| **Panics** | Note context invalid; asset count wrong; faucet/supply checks fail | + +#### Faucet Account Storage + +| Slot name | Slot type | Value encoding | Purpose | +|-----------|-----------|----------------|---------| +| Faucet metadata (standard) | Value | `[token_supply, max_supply, decimals, token_symbol]` | Standard `NetworkFungibleFaucet` metadata | +| `agglayer::faucet::conversion_info_1` | Value | `[addr_0, addr_1, addr_2, addr_3]` | Origin token address, first 4 u32 limbs | +| `agglayer::faucet::conversion_info_2` | Value | `[addr_4, origin_network, scale, 0]` | Origin token address 5th limb, origin network ID, scale exponent | + +--- + +## 3. Note Types and Storage Layouts + +**Encoding conventions:** All multi-byte values in note storage (addresses, U256 +integers, Keccak-256 hashes) are encoded as arrays of u32 felts via +`bytes_to_packed_u32_felts`: big-endian limb order with **little-endian byte order** +within each 4-byte limb (see [Section 5.5](#55-endianness-summary)). 
Scalar u32 fields +(network IDs) are byte-reversed at storage time so their in-memory bytes align with the +Keccak preimage format directly — the felt value does **not** equal the numeric value +(e.g., chain ID `1` = `0x00000001` is stored as felt `0x01000000`). + +### 3.1 B2AGG +(Bridge-to-AggLayer) + +**Purpose:** User bridges an asset from Miden to the AggLayer. + +**`NoteHeader`** + +*`NoteMetadata`:* + +| Field | Value | +|-------|-------| +| `sender` | Any account (not validated) | +| `note_type` | `NoteType::Public` | +| `tag` | `NoteTag::default()` | +| `attachment` | `NetworkAccountTarget` -- target is the bridge account; execution hint: Always | + +**`NoteDetails`** + +*`NoteAssets`:* Exactly 1 fungible asset. + +*`NoteRecipient`:* + +| Field | Value | +|-------|-------| +| `serial_num` | Random (`rng.draw_word()`) | +| `script` | `B2AGG.masb` | +| `storage` | 6 felts -- see layout below | + +**Storage layout (6 felts):** + +| Index | Field | Encoding | +|-------|-------|----------| +| 0 | `destination_network` | u32 | +| 1-5 | `destination_address` | 5 x u32 felts (20-byte Ethereum address) | + +**Consumption:** + +- **Bridge-out:** Consuming account is the bridge -> note validates attachment target, + loads storage and asset, calls `bridge_out::bridge_out`. +- **Reclaim:** Consuming account is the original sender -> assets are added back to the + account via `basic_wallet::add_assets_to_account`. No output notes. + +### 3.2 CLAIM + +**Purpose:** Claim assets, which were deposited on any AggLayer-connected rollup, on Miden. Consumed by +the faucet (TODO (Future): [Re-orient `CLAIM` note flow](https://github.com/0xMiden/protocol/issues/2506) through the bridge account), which mints the asset and sends it to the recipient. 
+ +**`NoteHeader`** + +*`NoteMetadata`:* + +| Field | Value | +|-------|-------| +| `sender` | Any account (not validated) | +| `note_type` | `NoteType::Public` | +| `tag` | `NoteTag::default()` | +| `attachment` | `NetworkAccountTarget` -- target is the faucet account; execution hint: Always | + +**`NoteDetails`** + +*`NoteAssets`:* None (empty). + +*`NoteRecipient`:* + +| Field | Value | +|-------|-------| +| `serial_num` | Random (`rng.draw_word()`) | +| `script` | `CLAIM.masb` | +| `storage` | 569 felts -- see layout below | + +**Storage layout (569 felts):** + +The storage is divided into three logical regions: proof data (felts 0-535), leaf data +(felts 536-567), and the native claim amount (felt 568). + +| Range | Field | Size (felts) | Encoding | +|-------|-------|-------------|----------| +| 0-255 | `smt_proof_local_exit_root` | 256 | 32 x Keccak-256 nodes (8 felts each) | +| 256-511 | `smt_proof_rollup_exit_root` | 256 | 32 x Keccak-256 nodes (8 felts each) | +| 512-519 | `global_index` | 8 | U256 as 8 x u32 felts | +| 520-527 | `mainnet_exit_root` | 8 | Keccak-256 hash as 8 x u32 felts | +| 528-535 | `rollup_exit_root` | 8 | Keccak-256 hash as 8 x u32 felts | +| 536 | `leaf_type` | 1 | u32 (0 = asset) | +| 537 | `origin_network` | 1 | u32 | +| 538-542 | `origin_token_address` | 5 | 5 x u32 felts | +| 543 | `destination_network` | 1 | u32 | +| 544-548 | `destination_address` | 5 | 5 x u32 felts | +| 549-556 | `amount` | 8 | U256 as 8 x u32 felts | +| 557-564 | `metadata_hash` | 8 | Keccak-256 hash as 8 x u32 felts | +| 565-567 | padding | 3 | zeros | +| 568 | `miden_claim_amount` | 1 | Scaled-down Miden token amount (Felt). Computed as `floor(amount / 10^scale)` | + +**Consumption:** + +1. Script asserts consuming account matches the target faucet via `NetworkAccountTarget` + attachment (checked before loading storage). +2. All 569 felts are loaded into memory. +3. The `miden_claim_amount` is read from memory index 568 and placed on the stack. +4. 
Proof data and leaf data regions are hashed and inserted into the advice map as two + keyed entries (`PROOF_DATA_KEY`, `LEAF_DATA_KEY`). +5. `agglayer_faucet::claim` is called with `[PROOF_DATA_KEY, LEAF_DATA_KEY, miden_claim_amount]` + on the stack. It validates the proof via FPI to the bridge, verifies the native claim + amount conversion, then mints and creates a P2ID output note. + +### 3.3 CONFIG_AGG_BRIDGE + +**Purpose:** Registers a faucet in the bridge's faucet registry. + +**`NoteHeader`** + +*`NoteMetadata`:* + +| Field | Value | +|-------|-------| +| `sender` | Bridge admin (sender authorization enforced by the bridge's `register_faucet` procedure) | +| `note_type` | `NoteType::Public` | +| `tag` | `NoteTag::default()` | +| `attachment` | `NetworkAccountTarget` -- target is the bridge account; execution hint: Always | + +**`NoteDetails`** + +*`NoteAssets`:* None (empty). + +*`NoteRecipient`:* + +| Field | Value | +|-------|-------| +| `serial_num` | Random (`rng.draw_word()`) | +| `script` | `CONFIG_AGG_BRIDGE.masb` | +| `storage` | 2 felts -- see layout below | + +**Storage layout (2 felts):** + +| Index | Field | Encoding | +|-------|-------|----------| +| 0 | `faucet_id_prefix` | Felt (AccountId prefix) | +| 1 | `faucet_id_suffix` | Felt (AccountId suffix) | + +**Consumption:** Script validates attachment target, loads storage, and calls +`bridge_config::register_faucet` (which asserts sender is bridge admin). + +### 3.4 UPDATE_GER + +**Purpose:** Stores a new Global Exit Root (GER) in the bridge account so that subsequent +CLAIM notes can be verified against it. 
+ +**`NoteHeader`** + +*`NoteMetadata`:* + +| Field | Value | +|-------|-------| +| `sender` | GER manager (sender authorization enforced by the bridge's `update_ger` procedure) | +| `note_type` | `NoteType::Public` | +| `tag` | `NoteTag::default()` | +| `attachment` | `NetworkAccountTarget` -- target is the bridge account; execution hint: Always | + +**`NoteDetails`** + +*`NoteAssets`:* None (empty). + +*`NoteRecipient`:* + +| Field | Value | +|-------|-------| +| `serial_num` | Random (`rng.draw_word()`) | +| `script` | `UPDATE_GER.masb` | +| `storage` | 8 felts -- see layout below | + +**Storage layout (8 felts):** + +| Range | Field | Encoding | +|-------|-------|----------| +| 0-3 | `GER_LOWER` | First 16 bytes as 4 x u32 felts | +| 4-7 | `GER_UPPER` | Last 16 bytes as 4 x u32 felts | + +**Consumption:** Script validates attachment target, loads storage, and calls +`bridge_config::update_ger` (which asserts sender is GER manager), which computes +`rpo256::merge(GER_UPPER, GER_LOWER)` and stores the result in the GER map. + +### 3.5 BURN (generated) + +**Purpose:** Created by `bridge_out::bridge_out` to burn the bridged asset on the faucet. + +**`NoteHeader`** + +*`NoteMetadata`:* + +| Field | Value | +|-------|-------| +| `sender` | Bridge account | +| `note_type` | `NoteType::Public` | +| `tag` | `NoteTag::default()` | +| `attachment` | `NetworkAccountTarget` -- target is the faucet account; execution hint: Always | + +**`NoteDetails`** + +*`NoteAssets`:* The single fungible asset from the originating B2AGG note. + +*`NoteRecipient`:* + +| Field | Value | +|-------|-------| +| `serial_num` | Derived as `rpo256::merge(B2AGG_SERIAL_NUM, ASSET)` | +| `script` | Standard BURN script (`miden::standards::notes::burn::main`) | +| `storage` | None (0 felts) | + +**Storage layout (0 felts):** + +No fields -- this is a standard burn note with no custom data. + +**Consumption:** + +The standard BURN script calls `faucets::burn` on the consuming faucet account. 
This +validates that the note contains exactly one fungible asset issued by that faucet and +decreases the faucet's total token supply by the burned amount. + +### 3.6 P2ID (generated) + +**Purpose:** Created by `agglayer_faucet::claim` to deliver minted assets to the recipient. + +**`NoteHeader`** + +*`NoteMetadata`:* + +| Field | Value | +|-------|-------| +| `sender` | Faucet account | +| `note_type` | `NoteType::Public` | +| `tag` | Computed at runtime from destination account prefix via `note_tag::create_account_target` | +| `attachment` | None | + +**`NoteDetails`** + +*`NoteAssets`:* The minted fungible asset for the claim amount. + +*`NoteRecipient`:* + +| Field | Value | +|-------|-------| +| `serial_num` | Derived deterministically from `PROOF_DATA_KEY` (RPO256 hash of the CLAIM proof data) | +| `script` | Standard P2ID script (`miden::standards::notes::p2id::main`) | +| `storage` | 2 felts -- see layout below | + +**Storage layout (2 felts):** + +| Index | Field | Encoding | +|-------|-------|----------| +| 0 | `target_account_id_prefix` | Felt (AccountId prefix) | +| 1 | `target_account_id_suffix` | Felt (AccountId suffix) | + +**Consumption:** + +Consuming account must match `target_account_id` from note storage (enforced by the P2ID +script). All note assets are added to the consuming account via +`basic_wallet::add_assets_to_account`. + +--- + +## 4. Amount Conversion + +*This section is a placeholder. Content to be added.* + +--- + +## 5. Ethereum ↔ Miden Address Conversion + +The AggLayer bridge operates across two address spaces: Ethereum's 20-byte addresses and +Miden's `AccountId` (two field elements). This section specifies the encoding that maps +between them, as implemented in Rust (`eth_types/address.rs`) and MASM +(`agglayer/common/eth_address.masm`). 
+ +### 5.1 Background + +Miden's `AccountId` (version 0) consists of two Goldilocks field elements: + +```text +prefix: [hash (56 bits) | storage_mode (2 bits) | type (2 bits) | version (4 bits)] +suffix: [zero_bit | hash (55 bits) | 8 zero_bits] +``` + +Each element is a `u64` value less than the Goldilocks prime `p = 2^64 − 2^32 + 1`, +giving a combined 120 bits of entropy. A prefix is always a valid felt because it derives +directly from a hash output; the suffix's MSB is constrained to zero and its lower 8 bits +are zeroed. + +Ethereum addresses are 20-byte (160-bit) values. Because every valid `AccountId` fits in +16 bytes (prefix: 8 bytes, suffix: 8 bytes), it can be embedded into the lower 16 bytes +of an Ethereum address with 4 zero-padding bytes at the top. + +### 5.2 Embedded Format + +An `AccountId` is embedded in a 20-byte Ethereum address as follows: + +```text + Byte offset: 0 4 8 12 16 20 + ┌────┬─────────┬─────────┐ + │0000│ prefix │ suffix │ + └────┴─────────┴─────────┘ + 4B 8B 8B +``` + +| Byte range | Content | Encoding | +|------------|---------|----------| +| `[0..4)` | Zero padding | Must be `0x00000000` | +| `[4..12)` | `prefix` | Big-endian `u64` (`felts[0].as_int().to_be_bytes()`) | +| `[12..20)` | `suffix` | Big-endian `u64` (`felts[1].as_int().to_be_bytes()`) | + +**Example conversions:** + +| Bech32 | Ethereum address | +|--------|-----------------| +| `mtst1azcw08rget79fqp8ymr0zqkv5v5lj466` | `0x00000000b0e79c68cafc54802726c6f102cca300` | +| `mtst1arxmxavamh7lqyp79mexktt4vgxv40mp` | `0x00000000cdb3759dddfdf0103e2ef26b2d756200` | +| `mtst1ar2phe0pa0ln75plsczxr8ryws4s8zyp` | `0x00000000d41be5e1ebff3f503f8604619c647400` | + +Note that the last byte of the Ethereum address is always `0x00` because the lower 8 bits +of the `AccountId` suffix are always zero. + +**Limitation:** Not all Ethereum addresses are valid Miden accounts. 
The conversion from +Ethereum address to `AccountId` is partial — it fails if the leading 4 bytes are +non-zero, if the packed `u64` values exceed the field modulus, or if the resulting felts +don't form a valid `AccountId`. Arbitrary Ethereum addresses (e.g., from EOAs or +contracts on L1) cannot generally be decoded into `AccountId` values. + +### 5.3 MASM Limb Representation + +Inside the Miden VM, a 20-byte Ethereum address is represented as 5 field elements, each +holding a `u32` value. This layout uses **big-endian limb +order** (matching the Solidity ABI encoding convention): + +| Limb | Byte range | Description | +|------|-----------|-------------| +| `address[0]` | `bytes[0..4]` | Most-significant 4 bytes (must be zero for embedded `AccountId`) | +| `address[1]` | `bytes[4..8]` | Upper half of prefix | +| `address[2]` | `bytes[8..12]` | Lower half of prefix | +| `address[3]` | `bytes[12..16]` | Upper half of suffix | +| `address[4]` | `bytes[16..20]` | Lower half of suffix | + +**Byte order within each limb:** Each 4-byte chunk is packed into a `u32` felt using +**little-endian** byte order, aligning with the expected format for the +Keccak-256 precompile. + +The Rust function `EthAddressFormat::to_elements()` produces exactly this 5-felt array +from a 20-byte address. + +### 5.4 Conversion Procedures + +#### 5.4.1 `AccountId` → Ethereum Address (Rust) + +`EthAddressFormat::from_account_id(account_id: AccountId) -> EthAddressFormat` + +This is the **external API** used by the bridge interface. It lets a user convert a Miden `AccountId` (destination account on Miden) into an Ethereum address that will be encoded in the deposit data. + +**Algorithm:** + +1. Extract the two felts from the `AccountId`: `[prefix_felt, suffix_felt]`. +2. Write the prefix felt's `u64` value as 8 big-endian bytes into `out[4..12]`. +3. Write the suffix felt's `u64` value as 8 big-endian bytes into `out[12..20]`. +4. Leave `out[0..4]` as zeros. 
+ +This conversion is **infallible**: every valid `AccountId` produces a valid 20-byte +address. + +#### 5.4.2 Ethereum Address → `AccountId` (Rust) + +`EthAddressFormat::to_account_id(&self) -> Result` + +This is used internally during CLAIM note processing to extract the recipient's +`AccountId` from the embedded Ethereum address. + +While currently this is only used for testing purposes, the claim manager service could use this to +extract the recipient's `AccountId` from the embedded Ethereum address and e.g. perform some checks on the receiving account, such as checking if the account is new or already has funds. + +**Algorithm:** + +1. Assert `bytes[0..4] == [0, 0, 0, 0]`. Error: `NonZeroBytePrefix`. +2. Read `prefix = u64::from_be_bytes(bytes[4..12])`. +3. Read `suffix = u64::from_be_bytes(bytes[12..20])`. +4. Convert each `u64` to a `Felt` via `Felt::try_from(u64)`. Error: `FeltOutOfField` if + the value ≥ p (would be reduced mod p). +5. Construct `AccountId::try_from([prefix_felt, suffix_felt])`. Error: `InvalidAccountId` + if the felts don't satisfy `AccountId` constraints (invalid version, type, storage + mode, or suffix shape). + +**Error conditions:** + +| Error | Condition | +|-------|-----------| +| `NonZeroBytePrefix` | First 4 bytes are not zero | +| `FeltOutOfField` | A `u64` value ≥ the Goldilocks prime `p` | +| `InvalidAccountId` | The resulting felts don't form a valid `AccountId` | + +#### 5.4.3 Ethereum Address → `AccountId` (MASM) + +`eth_address::to_account_id` — Module: `agglayer::common::eth_address` + +This is the in-VM counterpart of the Rust `to_account_id`, invoked during CLAIM note +consumption to decode the recipient's address from the leaf data, and eventually for building the P2ID note for the recipient. + +**Stack signature:** + +```text +Inputs: [limb0, limb1, limb2, limb3, limb4] +Outputs: [prefix, suffix] +Invocation: exec +``` + +**Algorithm:** + +1. 
`assertz limb0` — the most-significant limb must be zero (error: `ERR_MSB_NONZERO`).
+2. Build `suffix` from `(limb4, limb3)`:
+   - a. Validate both values are `u32` (error: `ERR_NOT_U32`).
+   - b. Byte-swap each limb from little-endian to big-endian via `utils::swap_u32_bytes` (see [Section 5.5](#55-endianness-summary)).
+   - c. Pack into a felt: `suffix = bswap(limb3) × 2^32 + bswap(limb4)`.
+   - d. Verify no mod-p reduction: split the felt back via `u32split` and assert equality
+     with the original limbs (error: `ERR_FELT_OUT_OF_FIELD`).
+3. Build `prefix` from `(limb2, limb1)` using the same `build_felt` procedure.
+4. Return `[prefix, suffix]` on the stack.
+
+**Helper: `build_felt`**
+
+```text
+Inputs: [lo, hi] (little-endian u32 limbs, little-endian bytes)
+Outputs: [felt]
+```
+
+1. `u32assert2` — both inputs must be valid `u32`.
+2. Byte-swap each limb: `lo_be = bswap(lo)`, `hi_be = bswap(hi)`.
+3. Compute `felt = hi_be × 2^32 + lo_be`.
+4. Round-trip check: `u32split(felt)` must yield `(hi_be, lo_be)`. If not, the
+   combined value exceeded the field modulus.
+
+**Helper: `utils::swap_u32_bytes`**
+
+```text
+Inputs: [value]
+Outputs: [swapped]
+```
+
+Reverses the byte order of a `u32`: `[b0, b1, b2, b3] → [b3, b2, b1, b0]`.
+
+#### 5.4.4 Ethereum Address → Field Elements (Rust)
+
+`EthAddressFormat::to_elements(&self) -> Vec<Felt>`
+
+Converts the 20-byte address into a field element array for use in the Miden VM.
+Each 4-byte chunk is interpreted as a **little-endian** `u32` and stored as a `Felt`.
+The output order matches the big-endian limb order described in [Section 5.3](#53-masm-limb-representation).
+
+This is used when constructing `NoteStorage` for B2AGG notes (see [Section 3.1](#31-b2agg)) and CLAIM notes (see [Section 3.2](#32-claim)).
+
+### 5.5 Endianness Summary
+
+The conversion involves multiple levels of byte ordering: this table clarifies the different conventions used.
+
+| Level | Convention | Detail |
+|-------|-----------|--------|
+| **Limb order** | Big-endian | `address[0]` holds the most-significant 4 bytes of the 20-byte address |
+| **Byte order within each limb** | Little-endian | The 4 bytes of a limb are packed as `b0 + b1×2^8 + b2×2^16 + b3×2^24` |
+| **Felt packing (u64)** | Big-endian u32 pairs | `felt = hi_be × 2^32 + lo_be` where `hi_be` and `lo_be` are field elements representing the big-endian-encoded `u32` values |
+
+The byte swap (`swap_u32_bytes`) in the MASM `build_felt` procedure bridges between
+the little-endian bytes of each limb in `NoteStorage` and the big-endian bytes of the `u32` pairs needed to construct the prefix and suffix.
+
+### 5.6 Roundtrip Guarantee
+
+The encoding is a bijection over the set of valid `AccountId` values: for every valid
+`AccountId`, `from_account_id` followed by `to_account_id` (or the MASM equivalent)
+recovers the original.
diff --git a/crates/miden-agglayer/asm/agglayer/bridge/bridge_config.masm b/crates/miden-agglayer/asm/agglayer/bridge/bridge_config.masm
new file mode 100644
index 0000000000..7c486135e0
--- /dev/null
+++ b/crates/miden-agglayer/asm/agglayer/bridge/bridge_config.masm
@@ -0,0 +1,309 @@
+use miden::core::crypto::hashes::poseidon2
+use miden::protocol::account_id
+use miden::protocol::active_account
+use miden::protocol::active_note
+use miden::protocol::native_account
+
+# ERRORS
+# =================================================================================================
+
+const ERR_GER_NOT_FOUND = "GER not found in storage"
+const ERR_FAUCET_NOT_REGISTERED="faucet is not registered in the bridge's faucet registry"
+const ERR_TOKEN_NOT_REGISTERED="token address is not registered in the bridge's token registry"
+const ERR_SENDER_NOT_BRIDGE_ADMIN="note sender is not the bridge admin"
+const ERR_SENDER_NOT_GER_MANAGER="note sender is not the global exit root manager"
+
+# CONSTANTS
+# 
================================================================================================= + +# Storage slots +const BRIDGE_ADMIN_SLOT=word("agglayer::bridge::admin_account_id") +const GER_MANAGER_SLOT=word("agglayer::bridge::ger_manager_account_id") +const GER_MAP_STORAGE_SLOT=word("agglayer::bridge::ger_map") +const FAUCET_REGISTRY_MAP_SLOT=word("agglayer::bridge::faucet_registry_map") +const TOKEN_REGISTRY_MAP_SLOT=word("agglayer::bridge::token_registry_map") + +# Flags +const GER_KNOWN_FLAG=1 +const IS_FAUCET_REGISTERED_FLAG=1 + +# Offset in the local memory of the `hash_token_address` procedure +const TOKEN_ADDR_HASH_PTR=0 + +# PUBLIC INTERFACE +# ================================================================================================= + +#! Updates the Global Exit Root (GER) in the bridge account storage. +#! +#! Computes hash(GER) = poseidon2::merge(GER_LOWER, GER_UPPER) and stores it in a map +#! with value [GER_KNOWN_FLAG, 0, 0, 0] to indicate the GER is known. +#! +#! Panics if the note sender is not the global exit root manager. +#! +#! Inputs: [GER_LOWER[4], GER_UPPER[4], pad(8)] +#! Outputs: [pad(16)] +#! +#! Invocation: call +pub proc update_ger + # assert the note sender is the global exit root manager. + exec.assert_sender_is_ger_manager + # => [GER_LOWER[4], GER_UPPER[4], pad(8)] + + # compute hash(GER) = poseidon2::merge(GER_LOWER, GER_UPPER) + exec.poseidon2::merge + # => [GER_HASH, pad(12)] + + # prepare VALUE = [GER_KNOWN_FLAG, 0, 0, 0] + push.0.0.0.GER_KNOWN_FLAG + # => [GER_KNOWN_FLAG, 0, 0, 0, GER_HASH, pad(12)] + + swapw + # => [GER_HASH, VALUE, pad(12)] + + push.GER_MAP_STORAGE_SLOT[0..2] + # => [slot_id_prefix, slot_id_suffix, GER_HASH, VALUE, pad(12)] + + exec.native_account::set_map_item + # => [OLD_VALUE, pad(12)] + dropw + # => [pad(16)] +end + +#! Asserts that the provided GER is valid (exists in storage). +#! +#! Computes hash(GER) = poseidon2::merge(GER_LOWER, GER_UPPER) and looks up the hash in +#! 
the GER storage map. Panics if the GER has never been stored. +#! +#! Inputs: [GER_ROOT[8]] +#! Outputs: [] +#! +#! Panics if: +#! - the GER is not found in storage. +#! +#! Invocation: exec +pub proc assert_valid_ger + # compute hash(GER) + exec.poseidon2::merge + # => [GER_HASH] + + push.GER_MAP_STORAGE_SLOT[0..2] + # => [slot_id_prefix, slot_id_suffix, GER_HASH] + + exec.active_account::get_map_item + # => [VALUE] + + # assert the GER is known in storage (VALUE = [GER_KNOWN_FLAG, 0, 0, 0]) + push.0.0.0.GER_KNOWN_FLAG + # => [GER_KNOWN_FLAG, 0, 0, 0, VALUE] + + assert_eqw.err=ERR_GER_NOT_FOUND + # => [] +end + +#! Registers a faucet in the bridge's faucet registry and token registry. +#! +#! 1. Writes `KEY -> [1, 0, 0, 0]` into the `faucet_registry` map, where +#! `KEY = [0, 0, faucet_id_suffix, faucet_id_prefix]`. +#! 2. Writes `hash(tokenAddress[5]) -> [faucet_id_suffix, faucet_id_prefix, 0, 0]` +#! into the `token_registry` map. +#! +#! Panics if the note sender is not the bridge admin. +#! +#! Inputs: [origin_token_addr(5), faucet_id_suffix, faucet_id_prefix, pad(9)] +#! Outputs: [pad(16)] +#! +#! Invocation: call +pub proc register_faucet + # assert the note sender is the bridge admin. + exec.assert_sender_is_bridge_admin + # => [origin_token_addr(5), faucet_id_suffix, faucet_id_prefix, pad(9)] + + # Save faucet ID for later use in token_registry + dup.6 dup.6 + # => [faucet_id_suffix, faucet_id_prefix, origin_token_addr(5), faucet_id_suffix, faucet_id_prefix, pad(9)] + + # --- 1. Register faucet in faucet_registry --- + # set_map_item expects [slot_id(2), KEY, VALUE] and returns [OLD_VALUE]. 
+ # Build KEY = [0, 0, suffix, prefix] and VALUE = [IS_FAUCET_REGISTERED_FLAG, 0, 0, 0] + push.0.0.0.IS_FAUCET_REGISTERED_FLAG + # => [IS_FAUCET_REGISTERED_FLAG, 0, 0, 0, faucet_id_suffix, faucet_id_prefix, origin_token_addr(5), faucet_id_suffix, faucet_id_prefix, pad(9)] + + movup.5 movup.5 + # => [faucet_id_suffix, faucet_id_prefix, IS_FAUCET_REGISTERED_FLAG, 0, 0, 0, origin_token_addr(5), faucet_id_suffix, faucet_id_prefix, pad(9)] + + push.0.0 + # => [[0, 0, faucet_id_suffix, faucet_id_prefix], [IS_FAUCET_REGISTERED_FLAG, 0, 0, 0], origin_token_addr(5), faucet_id_suffix, faucet_id_prefix, pad(9)] + + push.FAUCET_REGISTRY_MAP_SLOT[0..2] + exec.native_account::set_map_item + # => [OLD_VALUE, origin_token_addr(5), faucet_id_suffix, faucet_id_prefix, pad(9)] + + dropw + # => [origin_token_addr(5), faucet_id_suffix, faucet_id_prefix, pad(9)] + + # --- 2. Register token address → faucet ID in token_registry --- + + # Hash the token address + exec.hash_token_address + # => [TOKEN_ADDR_HASH, faucet_id_suffix, faucet_id_prefix, pad(10)] + + # Build VALUE = [0, 0, faucet_id_suffix, faucet_id_prefix] + movup.5 movup.5 + # => [faucet_id_suffix, faucet_id_prefix, TOKEN_ADDR_HASH, pad(10)] + + push.0.0 + # => [0, 0, faucet_id_suffix, faucet_id_prefix, TOKEN_ADDR_HASH, pad(10)] + + swapw + # => [TOKEN_ADDR_HASH, 0, 0, faucet_id_suffix, faucet_id_prefix, pad(10)] + + push.TOKEN_REGISTRY_MAP_SLOT[0..2] + exec.native_account::set_map_item + # => [OLD_VALUE, pad(12)] + + dropw + # => [pad(16)] +end + +#! Asserts that a faucet is registered in the bridge's faucet registry. +#! +#! Looks up the faucet ID in the faucet registry map and asserts the registration +#! flag is set. +#! +#! Inputs: [faucet_id_suffix, faucet_id_prefix] +#! Outputs: [] +#! +#! Panics if: +#! - the faucet is not registered in the faucet registry. +#! +#! 
Invocation: exec +pub proc assert_faucet_registered + # Build KEY = [0, 0, faucet_id_suffix, faucet_id_prefix] + push.0.0 + # => [0, 0, faucet_id_suffix, faucet_id_prefix] + + push.FAUCET_REGISTRY_MAP_SLOT[0..2] + exec.active_account::get_map_item + # => [VALUE] + + # the stored word must be [1, 0, 0, 0] for registered faucets + assert.err=ERR_FAUCET_NOT_REGISTERED drop drop drop + # => [] +end + +#! Looks up the faucet account ID for a given origin token address. +#! +#! Hashes the origin token address (5 felts) and looks up the result in the token_registry map. +#! +#! Inputs: [origin_token_addr(5)] +#! Outputs: [faucet_id_suffix, faucet_id_prefix] +#! +#! Panics if: +#! - the token address is not registered in the token registry. +#! +#! Invocation: exec +pub proc lookup_faucet_by_token_address + # Hash the token address + exec.hash_token_address + # => [TOKEN_ADDR_HASH] + + push.TOKEN_REGISTRY_MAP_SLOT[0..2] + exec.active_account::get_map_item + # => [0, 0, faucet_id_suffix, faucet_id_prefix] + + # Assert the token is registered: faucet_id_prefix is always non-zero for valid account IDs. + dup.3 dup.3 push.0.0 + # => [0, 0, faucet_id_suffix, faucet_id_prefix, 0, 0, faucet_id_suffix, faucet_id_prefix] + + exec.account_id::is_equal + # => [is_id_zero, 0, 0, faucet_id_suffix, faucet_id_prefix] + + # If AccountId returned from map is zero, it means the token is not registered. + assertz.err=ERR_TOKEN_NOT_REGISTERED + drop drop + # => [faucet_id_suffix, faucet_id_prefix] +end + +# HELPER PROCEDURES +# ================================================================================================= + +#! Hashes a 5-felt origin token address using Poseidon2. +#! +#! Writes the 5 felts to memory and computes the Poseidon2 hash. +#! +#! Inputs: [origin_token_addr(5)] +#! Outputs: [TOKEN_ADDR_HASH] +#! +#! 
Invocation: exec +@locals(8) +proc hash_token_address + # Write origin_token_addr[5] to local memory for hashing + loc_storew_le.TOKEN_ADDR_HASH_PTR dropw + locaddr.TOKEN_ADDR_HASH_PTR add.4 mem_store + # => [] + + # Hash the token address: poseidon2::hash_elements(num_elements=5, start_ptr) + push.5 locaddr.TOKEN_ADDR_HASH_PTR + exec.poseidon2::hash_elements + # => [TOKEN_ADDR_HASH] +end + +#! Asserts that the note sender matches the bridge admin stored in account storage. +#! +#! Reads the bridge admin account ID from BRIDGE_ADMIN_SLOT and compares it against +#! the sender of the currently executing note. Panics if they do not match. +#! +#! Inputs: [pad(16)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! - the note sender does not match the bridge admin account ID. +#! +#! Invocation: exec +pub proc assert_sender_is_bridge_admin + # => [pad(16)] + + push.BRIDGE_ADMIN_SLOT[0..2] + exec.active_account::get_item + # => [0, 0, admin_suffix, admin_prefix, pad(16)] + + drop drop + # => [admin_suffix, admin_prefix, pad(16)] + + exec.active_note::get_sender + # => [sender_suffix, sender_prefix, admin_suffix, admin_prefix, pad(16)] + + exec.account_id::is_equal + assert.err=ERR_SENDER_NOT_BRIDGE_ADMIN + # => [pad(16)] +end + +#! Asserts that the note sender matches the global exit root manager stored in account storage. +#! +#! Reads the GER manager account ID from GER_MANAGER_SLOT and compares it against +#! the sender of the currently executing note. Panics if they do not match. +#! +#! Inputs: [pad(16)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! - the note sender does not match the GER manager account ID. +#! +#! 
Invocation: exec +pub proc assert_sender_is_ger_manager + # => [pad(16)] + + push.GER_MANAGER_SLOT[0..2] + exec.active_account::get_item + # => [0, 0, mgr_suffix, mgr_prefix, pad(16)] + + drop drop + # => [mgr_suffix, mgr_prefix, pad(16)] + + exec.active_note::get_sender + # => [sender_suffix, sender_prefix, mgr_suffix, mgr_prefix, pad(16)] + + exec.account_id::is_equal + assert.err=ERR_SENDER_NOT_GER_MANAGER + # => [pad(16)] +end diff --git a/crates/miden-agglayer/asm/agglayer/bridge/bridge_in.masm b/crates/miden-agglayer/asm/agglayer/bridge/bridge_in.masm new file mode 100644 index 0000000000..3333f680c3 --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/bridge/bridge_in.masm @@ -0,0 +1,1099 @@ +use agglayer::bridge::bridge_config +use agglayer::bridge::leaf_utils +use agglayer::common::utils +use agglayer::common::asset_conversion +use agglayer::common::eth_address +use agglayer::faucet -> agglayer_faucet +use miden::core::crypto::hashes::keccak256 +use miden::core::crypto::hashes::poseidon2 +use miden::core::mem +use miden::core::word +use miden::protocol::note +use miden::protocol::output_note +use miden::protocol::output_note::ATTACHMENT_KIND_NONE +use miden::protocol::active_account +use miden::protocol::native_account +use miden::protocol::tx +use miden::standards::note_tag +use miden::standards::note_tag::DEFAULT_TAG +use miden::standards::attachments::network_account_target +use miden::standards::note::execution_hint::ALWAYS +use miden::protocol::types::DoubleWord +use miden::protocol::types::MemoryAddress + +# ERRORS +# ================================================================================================= + +const ERR_BRIDGE_NOT_MAINNET = "mainnet flag must be 1 for a mainnet deposit" +const ERR_BRIDGE_NOT_ROLLUP = "mainnet flag must be 0 for a rollup deposit" +const ERR_LEADING_BITS_NON_ZERO = "leading bits of global index must be zero" +const ERR_MAINNET_FLAG_INVALID = "mainnet flag must be 0 or 1" +const ERR_ROLLUP_INDEX_NON_ZERO = 
"rollup index must be zero for a mainnet deposit" +const ERR_SMT_ROOT_VERIFICATION_FAILED = "merkle proof verification failed: provided SMT root does not match the computed root" +const ERR_CLAIM_ALREADY_SPENT = "claim note has already been spent" +const ERR_SOURCE_BRIDGE_NETWORK_OVERFLOW = "source bridge network overflowed u32" + +# CONSTANTS +# ================================================================================================= + +# Claim Nullifier Flag +const IS_CLAIMED_FLAG = [1, 0, 0, 0] + +# Storage slots +# ------------------------------------------------------------------------------------------------- + +# The slot in this component's storage layout where claim nullifiers are stored. +# Map entries: RPO(leaf_index, source_bridge_network) => [1, 0, 0, 0] +const CLAIM_NULLIFIERS_SLOT = word("agglayer::bridge::claim_nullifiers") + +# Storage slot constants for the CGI (claimed global index) chain hash. +# It is stored in two separate value slots. +const CGI_CHAIN_HASH_LO_SLOT_NAME = word("agglayer::bridge::cgi_chain_hash_lo") +const CGI_CHAIN_HASH_HI_SLOT_NAME = word("agglayer::bridge::cgi_chain_hash_hi") + +# Data sizes +# ------------------------------------------------------------------------------------------------- + +# the number of words (4 felts each) in the advice map leaf data +const CLAIM_PROOF_DATA_WORD_LEN = 134 +const CLAIM_LEAF_DATA_WORD_LEN = 8 + +# MINT note storage layout (public mode, 18 items): +# [0]: tag, [1]: amount, [2]: attachment_kind, [3]: attachment_scheme, +# [4-7]: ATTACHMENT, [8-11]: P2ID_SCRIPT_ROOT, [12-15]: SERIAL_NUM, +# [16]: account_id_suffix, [17]: account_id_prefix +const MINT_NOTE_NUM_STORAGE_ITEMS = 18 + +# P2ID output note constants +const OUTPUT_NOTE_TYPE_PUBLIC = 1 + +# P2ID attachment constants (the P2ID note created by the faucet has no attachment) +const P2ID_ATTACHMENT_SCHEME_NONE = 0 + +# Global memory pointers +# 
------------------------------------------------------------------------------------------------- + +# Memory pointers for proof data layout (used by verify_leaf / get_leaf_value) +# Memory layout for proof data (loaded from advice map via pipe_preimage_to_memory). +# The proof data occupies addresses [PROOF_DATA_PTR .. PROOF_DATA_PTR + 535]: +# [0..255] smtProofLocalExitRoot (256 felts = 32 Keccak256 nodes) +# [256..511] smtProofRollupExitRoot (256 felts = 32 Keccak256 nodes) +# [512..519] globalIndex (8 felts) +# [520..527] mainnetExitRoot (8 felts) +# [528..535] rollupExitRoot (8 felts) + +const PROOF_DATA_PTR = 0 +const SMT_PROOF_LOCAL_EXIT_ROOT_PTR = PROOF_DATA_PTR +const SMT_PROOF_ROLLUP_EXIT_ROOT_PTR = SMT_PROOF_LOCAL_EXIT_ROOT_PTR + 256 +const GLOBAL_INDEX_PTR = SMT_PROOF_ROLLUP_EXIT_ROOT_PTR + 256 +const EXIT_ROOTS_PTR = GLOBAL_INDEX_PTR + 8 +const MAINNET_EXIT_ROOT_PTR = EXIT_ROOTS_PTR +const ROLLUP_EXIT_ROOT_PTR = EXIT_ROOTS_PTR + 8 + +# Memory layout for leaf data (loaded separately via get_leaf_value) +const LEAF_DATA_START_PTR = 0 + +# Memory pointers for piped advice map data (used by claim procedure) +const CLAIM_PROOF_DATA_START_PTR = 0 +const CLAIM_LEAF_DATA_START_PTR = 536 +const CLAIM_OUTPUT_NOTE_FAUCET_AMOUNT = 568 + +# Memory addresses for stored keys (used by claim procedure) +const CLAIM_PROOF_DATA_KEY_MEM_ADDR = 700 +const CLAIM_LEAF_DATA_KEY_MEM_ADDR = 704 + +# Memory addresses used to temporarily store leaf_index and source_bridge_network +# across proof verification, and later as input to the claim nullifier hash. 
+const CLAIM_LEAF_INDEX_MEM_ADDR = 900 +const CLAIM_SOURCE_BRIDGE_NETWORK_MEM_ADDR = 901 + +# Memory addresses for leaf data fields (derived from leaf data layout at CLAIM_LEAF_DATA_START_PTR=536) +const ORIGIN_TOKEN_ADDRESS_0 = 538 +const ORIGIN_TOKEN_ADDRESS_1 = 539 +const ORIGIN_TOKEN_ADDRESS_2 = 540 +const ORIGIN_TOKEN_ADDRESS_3 = 541 +const ORIGIN_TOKEN_ADDRESS_4 = 542 +const DESTINATION_ADDRESS_0 = 544 +const DESTINATION_ADDRESS_1 = 545 +const DESTINATION_ADDRESS_2 = 546 +const DESTINATION_ADDRESS_3 = 547 +const DESTINATION_ADDRESS_4 = 548 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_0 = 549 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_1 = 550 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_2 = 551 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_3 = 552 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_4 = 553 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_5 = 554 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_6 = 555 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_7 = 556 + +# Memory addresses for MINT note output construction +const MINT_NOTE_STORAGE_MEM_ADDR_0 = 800 +const MINT_NOTE_STORAGE_DEST_TAG = 800 +const MINT_NOTE_STORAGE_NATIVE_AMOUNT = 801 +const MINT_NOTE_STORAGE_ATTACHMENT_KIND = 802 +const MINT_NOTE_STORAGE_ATTACHMENT_SCHEME = 803 +const MINT_NOTE_STORAGE_ATTACHMENT = 804 +const MINT_NOTE_STORAGE_OUTPUT_SCRIPT_ROOT = 808 +const MINT_NOTE_STORAGE_OUTPUT_SERIAL_NUM = 812 +const MINT_NOTE_STORAGE_OUTPUT_NOTE_SUFFIX = 816 +const MINT_NOTE_STORAGE_OUTPUT_NOTE_PREFIX = 817 + +# Local memory offsets +# ------------------------------------------------------------------------------------------------- + +# Offsets in the local memory of the `calculate_root` procedure +const CUR_HASH_LO_LOCAL = 0 # first half of the current Keccak256 hash value +const CUR_HASH_HI_LOCAL = 4 # second half of the current Keccak256 hash value + +# Offsets in the local memory of the `claim` procedure +const CLAIM_DEST_ID_PREFIX_LOCAL = 0 +const CLAIM_DEST_ID_SUFFIX_LOCAL = 1 + +# PUBLIC INTERFACE +# 
================================================================================================= + +#! Validates a claim against the AggLayer bridge and creates a MINT note for the aggfaucet. +#! +#! This procedure is called by the CLAIM note script. It validates the Merkle proof and then +#! looks up the faucet account ID from the token registry using the origin token address from +#! the leaf data, and creates a MINT note targeting the aggfaucet. +#! +#! The MINT note uses the standard MINT note pattern (public mode) with 18 storage items. +#! See `write_mint_note_storage` for the full storage layout. +#! +#! Inputs: [PROOF_DATA_KEY, LEAF_DATA_KEY, faucet_mint_amount, pad(7)] +#! Outputs: [pad(16)] +#! +#! Advice map: { +#! PROOF_DATA_KEY => [ +#! smtProofLocalExitRoot[256], // SMT proof for local exit root (256 felts) +#! smtProofRollupExitRoot[256], // SMT proof for rollup exit root (256 felts) +#! globalIndex[8], // Global index (8 felts, uint256 as 8 u32 felts) +#! mainnetExitRoot[8], // Mainnet exit root hash (8 felts) +#! rollupExitRoot[8], // Rollup exit root hash (8 felts) +#! ], +#! LEAF_DATA_KEY => [ +#! leafType[1], // Leaf type (1 felt, uint32) +#! originNetwork[1], // Origin network identifier (1 felt, uint32) +#! originTokenAddress[5], // Origin token address (5 felts) +#! destinationNetwork[1], // Destination network identifier (1 felt, uint32) +#! destinationAddress[5], // Destination address (5 felts) +#! amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts) +#! metadata[8], // ABI encoded metadata (8 felts, fixed size) +#! padding[3], // padding (3 felts) +#! ], +#! } +#! +#! Panics if: +#! - the Merkle proof validation fails. +#! - the origin token address is not registered in the bridge's token registry. +#! +#! 
Invocation: call +@locals(2) # 0: dest_prefix, 1: dest_suffix +pub proc claim + # Write output note faucet amount to memory + movup.8 mem_store.CLAIM_OUTPUT_NOTE_FAUCET_AMOUNT + # => [PROOF_DATA_KEY, LEAF_DATA_KEY, pad(8)] + + # Check AdviceMap values hash to keys & write CLAIM inputs & DATA_KEYs to global memory + exec.claim_batch_pipe_double_words + # => [pad(16)] + + exec.get_destination_account_id_data + loc_store.CLAIM_DEST_ID_SUFFIX_LOCAL loc_store.CLAIM_DEST_ID_PREFIX_LOCAL + # => [pad(16)] + + # Validate CLAIM data + mem_loadw_be.CLAIM_PROOF_DATA_KEY_MEM_ADDR + # => [PROOF_DATA_KEY, pad(12)] + + swapw mem_loadw_be.CLAIM_LEAF_DATA_KEY_MEM_ADDR + # => [LEAF_DATA_KEY, PROOF_DATA_KEY, pad(8)] + + exec.verify_leaf_bridge + # => [pad(16)] + + # Look up the faucet account ID from the origin token address + exec.get_origin_token_address + # => [origin_token_addr(5), pad(16)] + + exec.bridge_config::lookup_faucet_by_token_address + # => [faucet_id_suffix, faucet_id_prefix, pad(16)] + + dup.1 dup.1 + # => [faucet_id_suffix, faucet_id_prefix, faucet_id_suffix, faucet_id_prefix, pad(16)] + + # Verify faucet_mint_amount matches the leaf data amount + exec.verify_claim_amount + # => [faucet_id_suffix, faucet_id_prefix, pad(16)] + + # Build MINT output note targeting the aggfaucet + loc_load.CLAIM_DEST_ID_PREFIX_LOCAL loc_load.CLAIM_DEST_ID_SUFFIX_LOCAL + # => [destination_id_suffix, destination_id_prefix, faucet_id_suffix, faucet_id_prefix, pad(16)] + + exec.build_mint_output_note + # => [pad(16)] +end + +# HELPER PROCEDURES +# ================================================================================================= + +#! Computes the leaf value and verifies it against the AggLayer bridge state. +#! +#! Verification is delegated to `verify_leaf` to mimic the AggLayer Solidity contracts. +#! The steps involved in verification are: +#! 1. Compute the GER from the mainnet and rollup exit roots. +#! 2. Assert that the computed GER is valid (exists in storage). +#! 3. 
Process the global index to determine if it's a mainnet or rollup deposit. +#! 4. Verify the Merkle proof for the provided leaf-index tuple against the computed GER. +#! +#! Inputs: +#! Operand stack: [LEAF_DATA_KEY, PROOF_DATA_KEY] +#! Advice map: { +#! PROOF_DATA_KEY => [ +#! smtProofLocalExitRoot[256], // SMT proof for local exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) +#! smtProofRollupExitRoot[256], // SMT proof for rollup exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) +#! globalIndex[8], // Global index (8 felts, uint256 as 8 u32 felts) +#! mainnetExitRoot[8], // Mainnet exit root hash (8 felts, bytes32 as 8 u32 felts) +#! rollupExitRoot[8], // Rollup exit root hash (8 felts, bytes32 as 8 u32 felts) +#! ], +#! LEAF_DATA_KEY => [ +#! leafType[1], // Leaf type (1 felt, uint32) +#! originNetwork[1], // Origin network identifier (1 felt, uint32) +#! originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts) +#! destinationNetwork[1], // Destination network identifier (1 felt, uint32) +#! destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts) +#! amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts) +#! metadata[8], // ABI encoded metadata (8 felts, fixed size) +#! padding[3], // padding (3 felts) - not used in the hash +#! ], +#! } +#! +#! Outputs: [] +#! +#! Panics if: +#! - the computed GER is invalid (never injected). +#! - the global index is invalid. +#! - the Merkle proof for the provided leaf-index tuple against the computed GER is invalid. +#! +#! Invocation: exec +proc verify_leaf_bridge + # get the leaf value. 
We have all the necessary leaf data in the advice map + exec.get_leaf_value + # => [LEAF_VALUE[8], PROOF_DATA_KEY] + + # duplicate the leaf value to use it later during the CGI chain hash computation + movupw.2 dupw.2 dupw.2 + # => [LEAF_VALUE[8], PROOF_DATA_KEY, LEAF_VALUE[8]] + + # delegate proof verification + exec.verify_leaf + # => [LEAF_VALUE[8]] + + # update the CGI chain hash + exec.update_cgi_chain_hash + # => [] +end + +#! Assert the global index is valid for a mainnet deposit. +#! +#! Each element of the global index is a LE-packed u32 felt (as produced by +#! `bytes_to_packed_u32_felts` / `GlobalIndex::to_elements()`). +#! +#! Inputs: [GLOBAL_INDEX[8]] +#! Outputs: [leaf_index] +#! +#! Panics if: +#! - the leading bits of the global index are not zero. +#! - the mainnet flag is not 1. +#! - the rollup index is not 0. +#! +#! Invocation: exec +pub proc process_global_index_mainnet + # the top 191 bits of the global index are zero + repeat.5 + assertz.err=ERR_LEADING_BITS_NON_ZERO + end + + # the next element is the mainnet flag (LE-packed u32) + # byte-swap to get the BE value, then assert it is exactly 1 + # => [mainnet_flag_le, rollup_index_le, leaf_index_le] + exec.utils::swap_u32_bytes + assert.err=ERR_BRIDGE_NOT_MAINNET + + # the rollup index must be zero for a mainnet deposit + # (zero is byte-order-independent, so no swap needed) + assertz.err=ERR_ROLLUP_INDEX_NON_ZERO + + # the leaf index is the last element; byte-swap from LE to BE to get the actual index + exec.utils::swap_u32_bytes + # => [leaf_index] +end + +#! Assert the global index is valid for a rollup deposit. +#! +#! Each element of the global index is a LE-packed u32 felt (as produced by +#! `bytes_to_packed_u32_felts` / `GlobalIndex::to_elements()`). +#! +#! Inputs: [GLOBAL_INDEX[8]] +#! Outputs: [leaf_index, rollup_index] +#! +#! Panics if: +#! - the leading bits of the global index are not zero. +#! - the mainnet flag is not 0. +#! +#! 
Invocation: exec +pub proc process_global_index_rollup + # the top 191 bits of the global index are zero + repeat.5 + assertz.err=ERR_LEADING_BITS_NON_ZERO + end + + # the next element is the mainnet flag (LE-packed u32) + # for a rollup deposit it must be exactly 0; zero is byte-order-independent, + # so no swap is needed before asserting + # => [mainnet_flag_le, rollup_index_le, leaf_index_le] + assertz.err=ERR_BRIDGE_NOT_ROLLUP + + # byte-swap rollup_index from LE to BE + exec.utils::swap_u32_bytes + # => [rollup_index, leaf_index_le] + + # byte-swap leaf_index from LE to BE + swap exec.utils::swap_u32_bytes + # => [leaf_index, rollup_index] +end + +#! Computes the Global Exit Tree (GET) root from the mainnet and rollup exit roots. +#! +#! The mainnet exit root is expected at `exit_roots_ptr` and +#! the rollup exit root is expected at `exit_roots_ptr + 8`. +#! +#! Inputs: [exit_roots_ptr] +#! Outputs: [GER_ROOT[8]] +#! +#! Invocation: exec +pub proc compute_ger(exit_roots_ptr: MemoryAddress) -> DoubleWord + push.64 swap + # => [exit_roots_ptr, len_bytes] + exec.keccak256::hash_bytes + # => [GER_ROOT[8]] +end + +#! Verifies a Merkle proof for a leaf value against a root. +#! +#! Verifies that the root, computed using the provided Merkle path and the leaf with its index, +#! matches the provided root. +#! +#! Inputs: [LEAF_VALUE_LO, LEAF_VALUE_HI, merkle_path_ptr, leaf_idx, expected_root_ptr] +#! Outputs: [verification_flag] +#! +#! Where: +#! - expected_root_ptr is the pointer to the memory where the expected SMT root is stored. +#! - [LEAF_VALUE_LO, LEAF_VALUE_HI] is the leaf for the provided Merkle path. +#! - merkle_path_ptr is the pointer to the memory where the merkle path is stored. This path is +#! represented as 32 Keccak256Digest values (64 words). +#! - leaf_idx is the index of the provided leaf in the SMT. +#! - [ROOT_LO, ROOT_HI] is the calculated root. +#! - verification_flag is the binary flag indicating whether the verification was successful. 
+pub proc verify_merkle_proof( + leaf_value: DoubleWord, + merkle_path_ptr: MemoryAddress, + leaf_idx: u32, + expected_root_ptr: MemoryAddress +) -> i1 + # calculate the root of the SMT + exec.calculate_root + # => [CALCULATED_ROOT_LO, CALCULATED_ROOT_HI, expected_root_ptr] + + # load the expected root onto the stack + movup.8 exec.utils::mem_load_double_word + # => [EXPECTED_ROOT_LO, EXPECTED_ROOT_HI, CALCULATED_ROOT_LO, CALCULATED_ROOT_HI] + + # assert the roots are equal + swapw.3 exec.word::eq + # => [exp_hi_equal_calc_hi, CALCULATED_ROOT_LO, EXPECTED_ROOT_LO] + + movdn.8 exec.word::eq and + # => [verification_flag] +end +#! Verifies that the faucet_mint_amount matches the raw U256 amount from the leaf data, +#! scaled down by the faucet's scale factor. +#! +#! This procedure: +#! 1. Performs an FPI call to the faucet's `get_scale` procedure to retrieve the scale factor. +#! 2. Loads the raw U256 amount from the leaf data in memory. +#! 3. Calls `verify_u256_to_native_amount_conversion` to assert that: +#! faucet_mint_amount == floor(raw_amount / 10^scale) +#! +#! Inputs: [faucet_id_suffix, faucet_id_prefix] +#! Outputs: [] +#! +#! Panics if: +#! - the FPI call to the faucet's get_scale fails. +#! - the faucet_mint_amount does not match the expected scaled-down value. +#! +#! 
Invocation: exec +proc verify_claim_amount + # Step 1: Pad the stack explicitly for FPI call (get_scale takes no inputs) + padw padw + movup.9 movup.9 + padw padw + movup.9 movup.9 + # => [faucet_id_suffix, faucet_id_prefix, pad(16)] + + # Step 2: FPI call to faucet's get_scale procedure + procref.agglayer_faucet::get_scale + # => [PROC_MAST_ROOT(4), faucet_id_suffix, faucet_id_prefix, pad(16)] + + movup.5 movup.5 + # => [faucet_id_suffix, faucet_id_prefix, PROC_MAST_ROOT(4), pad(16)] + + exec.tx::execute_foreign_procedure + # => [scale, pad(15)] + + # Clean up FPI output padding, keeping only scale + movdn.15 dropw dropw dropw drop drop drop + # => [scale] + + # Step 3: Load the raw U256 amount from leaf data memory + exec.get_raw_claim_amount + # => [x7, x6, x5, x4, x3, x2, x1, x0, scale] + + # Step 4: Load faucet_mint_amount (y) and position it for verification + mem_load.CLAIM_OUTPUT_NOTE_FAUCET_AMOUNT + # => [y, x7, x6, x5, x4, x3, x2, x1, x0, scale] + + movdn.9 + # => [x7, x6, x5, x4, x3, x2, x1, x0, scale, y] + + # Step 5: Verify that y = floor(x / 10^scale) + exec.asset_conversion::verify_u256_to_native_amount_conversion + # => [] +end + +#! Given the leaf data key, loads the leaf data from advice map to memory, packs the data in-place, +#! and computes the leaf value by hashing the packed bytes. +#! +#! Inputs: +#! Operand stack: [LEAF_DATA_KEY] +#! Advice map: { +#! LEAF_DATA_KEY => [ +#! leafType[1], // Leaf type (1 felt, uint8) +#! originNetwork[1], // Origin network identifier (1 felt, uint32) +#! originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts) +#! destinationNetwork[1], // Destination network identifier (1 felt, uint32) +#! destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts) +#! amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts) +#! metadata_hash[8], // Metadata hash (8 felts, bytes32 as 8 u32 felts) +#! padding[3], // padding (3 felts) - not used in the hash +#! ], +#! } +#! 
Outputs: [LEAF_VALUE[8]] +#! +#! Invocation: exec +pub proc get_leaf_value(leaf_data_key: word) -> DoubleWord + adv.push_mapval + # => [LEAF_DATA_KEY] + + push.LEAF_DATA_START_PTR push.CLAIM_LEAF_DATA_WORD_LEN + exec.mem::pipe_preimage_to_memory drop + # => [] + + # compute the leaf value for elements in memory starting at LEAF_DATA_START_PTR + push.LEAF_DATA_START_PTR + exec.leaf_utils::compute_leaf_value + # => [LEAF_VALUE[8]] +end + +#! Verify leaf, check that it has not been claimed, and mark it as claimed. +#! +#! Inputs: [LEAF_VALUE[8], PROOF_DATA_KEY] +#! Outputs: [] +#! +#! Panics if: +#! - the computed GER is invalid (never injected). +#! - the global index is invalid. +#! - the Merkle proof for the provided leaf-index tuple against the computed GER is invalid. +#! - the claim has already been spent. +#! +#! Invocation: exec +proc verify_leaf + movupw.2 + # load proof data from the advice map into memory + adv.push_mapval + # => [PROOF_DATA_KEY, LEAF_VALUE[8]] + + push.SMT_PROOF_LOCAL_EXIT_ROOT_PTR push.CLAIM_PROOF_DATA_WORD_LEN + exec.mem::pipe_preimage_to_memory drop + + # 1. compute GER from mainnet + rollup exit roots + push.EXIT_ROOTS_PTR + # => [exit_roots_ptr, LEAF_VALUE[8]] + exec.compute_ger + # => [GER[8], LEAF_VALUE[8]] + + # 2. assert the GER is valid + exec.bridge_config::assert_valid_ger + # => [LEAF_VALUE[8]] + + # 3. load global index from memory + push.GLOBAL_INDEX_PTR exec.utils::mem_load_double_word + # => [GLOBAL_INDEX[8], LEAF_VALUE[8]] + + # Determine if we're dealing with a deposit from mainnet or from a rollup. + # The global index is laid out as: + # [gi0, gi1, gi2, gi3, gi4, mainnet_flag_le, rollup_index_le, leaf_index_le] + # gi0 is on top (position 0). The mainnet flag is at stack position 5. + + # Duplicate the mainnet flag element, byte-swap from LE to BE, + # assert it is a valid boolean (0 or 1), then use it to branch. 
+ dup.5 exec.utils::swap_u32_bytes dup + # => [mainnet_flag, mainnet_flag, GLOBAL_INDEX[8], LEAF_VALUE[8]] + + u32lt.2 assert.err=ERR_MAINNET_FLAG_INVALID + # => [mainnet_flag, GLOBAL_INDEX[8], LEAF_VALUE[8]] + + if.true + # ==================== MAINNET DEPOSIT ==================== + exec.process_global_index_mainnet + # => [leaf_index, LEAF_VALUE[8]] + + # Save leaf_index to memory for set_and_check_claimed after proof verification + dup mem_store.CLAIM_LEAF_INDEX_MEM_ADDR + # => [leaf_index, LEAF_VALUE[8]] + + # verify single Merkle proof: leaf against mainnetExitRoot + push.MAINNET_EXIT_ROOT_PTR swap + push.SMT_PROOF_LOCAL_EXIT_ROOT_PTR + # => [smt_proof_ptr, leaf_index, mainnet_exit_root_ptr, LEAF_VALUE[8]] + + movdn.10 movdn.10 movdn.10 + # => [LEAF_VALUE[8], smt_proof_ptr, leaf_index, mainnet_exit_root_ptr] + + exec.verify_merkle_proof + # => [verification_flag] + + assert.err=ERR_SMT_ROOT_VERIFICATION_FAILED + # => [] + + # For mainnet deposits, source_bridge_network = 0 + # globalIndex = uint256(leafIndex), so source_bridge_network is always 0 + push.0 mem_store.CLAIM_SOURCE_BRIDGE_NETWORK_MEM_ADDR + # => [] + else + # ==================== ROLLUP DEPOSIT ==================== + # mainnet_flag = 0; extract rollup_index and leaf_index via helper, + # then do two-level verification + exec.process_global_index_rollup + # => [leaf_index, rollup_index, LEAF_VALUE[8]] + + # Save leaf_index and rollup_index to memory for set_and_check_claimed + # mem[CLAIM_LEAF_INDEX_MEM_ADDR] = leaf_index + # mem[CLAIM_SOURCE_BRIDGE_NETWORK_MEM_ADDR] = rollup_index (temporarily, updated below) + dup mem_store.CLAIM_LEAF_INDEX_MEM_ADDR + # => [leaf_index, rollup_index, LEAF_VALUE[8]] + + dup.1 mem_store.CLAIM_SOURCE_BRIDGE_NETWORK_MEM_ADDR + # => [leaf_index, rollup_index, LEAF_VALUE[8]] + + # Step 1: calculate_root(leafValue, smtProofLocalExitRoot, leafIndex) -> localExitRoot + # calculate_root expects: [LEAF_VALUE_LO, LEAF_VALUE_HI, merkle_path_ptr, leaf_index] + movdn.9 movdn.9 + 
# => [LEAF_VALUE[8], leaf_index, rollup_index]
+
+        # Insert smt_proof_local_ptr before leaf_index
+        push.SMT_PROOF_LOCAL_EXIT_ROOT_PTR movdn.8
+        # => [LEAF_VALUE[8], smt_proof_local_ptr, leaf_index, rollup_index]
+
+        exec.calculate_root
+        # => [LOCAL_EXIT_ROOT_LO, LOCAL_EXIT_ROOT_HI, rollup_index]
+
+        # Step 2: verify_merkle_proof(localExitRoot, smtProofRollupExitRoot, rollupIndex, rollupExitRootPtr)
+        push.ROLLUP_EXIT_ROOT_PTR movdn.9
+        # => [LOCAL_EXIT_ROOT_LO, LOCAL_EXIT_ROOT_HI, rollup_index, rollup_exit_root_ptr]
+
+        push.SMT_PROOF_ROLLUP_EXIT_ROOT_PTR movdn.8
+
+        # => [LOCAL_EXIT_ROOT[8], smt_proof_rollup_ptr, rollup_index, rollup_exit_root_ptr]
+
+        exec.verify_merkle_proof
+        # => [verification_flag]
+
+        assert.err=ERR_SMT_ROOT_VERIFICATION_FAILED
+        # => []
+
+        # For rollup deposits, source_bridge_network = rollup_index + 1
+        # Compute source_bridge_network and store it to memory
+        mem_load.CLAIM_SOURCE_BRIDGE_NETWORK_MEM_ADDR u32overflowing_add.1 assertz.err=ERR_SOURCE_BRIDGE_NETWORK_OVERFLOW
+        # => [source_bridge_network]
+
+        mem_store.CLAIM_SOURCE_BRIDGE_NETWORK_MEM_ADDR
+        # => []
+    end
+
+    # 4. Check the claim has not been spent and mark it as spent.
+    # Load leaf_index and source_bridge_network from memory.
+    mem_load.CLAIM_SOURCE_BRIDGE_NETWORK_MEM_ADDR
+    mem_load.CLAIM_LEAF_INDEX_MEM_ADDR
+    # => [leaf_index, source_bridge_network]
+
+    exec.set_and_check_claimed
+    # => []
+end
+
+#! Computes the claim nullifier as Poseidon2(leaf_index, source_bridge_network), then checks
+#! that the claim has not been spent and marks it as spent.
+#!
+#! This mimics the Solidity `_setAndCheckClaimed(leafIndex, sourceBridgeNetwork)` function.
+#! See: https://github.com/agglayer/agglayer-contracts/blob/60d06fc3224792ce55dc2690d66b6719a73398e7/contracts/v2/PolygonZkEVMBridgeV2.sol#L987
+#!
+#! Inputs: [leaf_index, source_bridge_network]
+#! Outputs: []
+#!
+#! Panics if:
+#! - the CLAIM note has already been spent (nullifier already exists in the nullifier map).
+#!
+#! 
Invocation: exec
+proc set_and_check_claimed
+    # Write both values to memory for hashing
+    #   mem[CLAIM_LEAF_INDEX_MEM_ADDR] = leaf_index
+    #   mem[CLAIM_SOURCE_BRIDGE_NETWORK_MEM_ADDR] = source_bridge_network
+    mem_store.CLAIM_LEAF_INDEX_MEM_ADDR
+    # => [source_bridge_network]
+
+    mem_store.CLAIM_SOURCE_BRIDGE_NETWORK_MEM_ADDR
+    # => []
+
+    # Hash the two elements using Poseidon2 to produce the nullifier
+    push.2 push.CLAIM_LEAF_INDEX_MEM_ADDR
+    exec.poseidon2::hash_elements
+    # => [NULLIFIER]
+
+    exec.assert_claim_not_spent
+    # => []
+end
+
+#! Checks that the CLAIM note has not already been spent, and marks it as spent
+#! by storing [1, 0, 0, 0] in the CLAIM_NULLIFIERS_SLOT map.
+#!
+#! The nullifier is computed as Poseidon2(leaf_index, source_bridge_network), which uniquely
+#! identifies a claim in the Global Exit Root (GER) as per the AggLayer protocol.
+#!
+#! Inputs: [NULLIFIER]
+#! Outputs: []
+#!
+#! Panics if:
+#! - the CLAIM note has already been spent (nullifier already exists in the nullifier map).
+#!
+#! 
Invocation: exec
+proc assert_claim_not_spent(nullifier: word)
+    push.IS_CLAIMED_FLAG
+    # => [IS_CLAIMED_FLAG, NULLIFIER]
+
+    swapw
+    # => [NULLIFIER, IS_CLAIMED_FLAG]
+
+    push.CLAIM_NULLIFIERS_SLOT[0..2]
+    # => [slot_prefix, slot_suffix, NULLIFIER, IS_CLAIMED_FLAG]
+
+    exec.native_account::set_map_item
+    # => [OLD_VALUE]
+
+    # OLD_VALUE[0] is the claimed flag: 0 if unclaimed, 1 if already claimed
+    assertz.err=ERR_CLAIM_ALREADY_SPENT drop drop drop
+    # => []
+end
+
+#! Inputs: [PROOF_DATA_KEY, LEAF_DATA_KEY]
+#! Outputs: []
+proc claim_batch_pipe_double_words
+    # 1) PROOF_DATA_KEY: save the key, then pipe its advice-map preimage into memory
+    mem_storew_be.CLAIM_PROOF_DATA_KEY_MEM_ADDR
+    adv.push_mapval
+    # => [PROOF_DATA_KEY]
+
+    push.CLAIM_PROOF_DATA_START_PTR push.CLAIM_PROOF_DATA_WORD_LEN
+    exec.mem::pipe_double_words_preimage_to_memory drop
+
+    # 2) LEAF_DATA_KEY: save the key, then pipe its advice-map preimage into memory
+    mem_storew_be.CLAIM_LEAF_DATA_KEY_MEM_ADDR
+    adv.push_mapval
+    # => [LEAF_DATA_KEY]
+
+    push.CLAIM_LEAF_DATA_START_PTR push.CLAIM_LEAF_DATA_WORD_LEN
+    exec.mem::pipe_double_words_preimage_to_memory drop
+end
+
+#! Extracts the destination account ID as address[5] from memory.
+#!
+#! This procedure reads the destination address from the leaf data and converts it from
+#! Ethereum address format to AccountId format (suffix, prefix).
+#!
+#! Inputs: []
+#! Outputs: [suffix, prefix]
+#!
+#! 
Invocation: exec +proc get_destination_account_id_data + mem_load.DESTINATION_ADDRESS_4 + mem_load.DESTINATION_ADDRESS_3 + mem_load.DESTINATION_ADDRESS_2 + mem_load.DESTINATION_ADDRESS_1 + mem_load.DESTINATION_ADDRESS_0 + # => [address[5]] + + exec.eth_address::to_account_id + # => [suffix, prefix] +end + +# Inputs: [] +# Outputs: [U256[0], U256[1]] +proc get_raw_claim_amount + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_7 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_6 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_5 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_4 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_3 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_2 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_1 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_0 +end + +#! Reads the origin token address (5 felts) from the leaf data in memory. +#! +#! Inputs: [] +#! Outputs: [origin_token_addr(5)] +#! +#! Invocation: exec +proc get_origin_token_address + mem_load.ORIGIN_TOKEN_ADDRESS_4 + mem_load.ORIGIN_TOKEN_ADDRESS_3 + mem_load.ORIGIN_TOKEN_ADDRESS_2 + mem_load.ORIGIN_TOKEN_ADDRESS_1 + mem_load.ORIGIN_TOKEN_ADDRESS_0 + # => [origin_token_addr(5)] +end + +#! Builds a PUBLIC MINT output note targeting the aggfaucet. +#! +#! The MINT note uses public mode (18 storage items) so the AggFaucet creates +#! a PUBLIC P2ID note on consumption. This procedure orchestrates three steps: +#! 1. Write all 18 MINT note storage items to global memory. +#! 2. Build the MINT note recipient digest from the storage. +#! 3. Create the output note, and set the attachment. +#! +#! Inputs: [destination_id_suffix, destination_id_prefix, faucet_id_suffix, faucet_id_prefix] +#! Outputs: [] +#! +#! 
Invocation: exec +proc build_mint_output_note + # Step 1: Write all 18 MINT note storage items to global memory + exec.write_mint_note_storage + # => [faucet_id_prefix, faucet_id_suffix] + + # Step 2: Build the MINT note recipient digest + exec.build_mint_recipient + # => [MINT_RECIPIENT, faucet_id_prefix, faucet_id_suffix] + + # Step 3: Create the output note and set the faucet attachment + exec.create_mint_note_with_attachment + # => [] +end + +#! Writes all 18 MINT note storage items to global memory. +#! +#! Storage layout: +#! - [0]: tag (note tag for the P2ID output note, targeting the destination account) +#! - [1]: amount (the scaled-down Miden amount to mint) +#! - [2]: attachment_kind (0 = no attachment) +#! - [3]: attachment_scheme (0 = no attachment) +#! - [4-7]: ATTACHMENT ([0, 0, 0, 0]) +#! - [8-11]: P2ID_SCRIPT_ROOT (script root of the P2ID note) +#! - [12-15]: SERIAL_NUM (serial number for the P2ID note, derived from PROOF_DATA_KEY) +#! - [16]: account_id_suffix (destination account suffix) +#! - [17]: account_id_prefix (destination account prefix) +#! +#! Inputs: [destination_id_suffix, destination_id_prefix] +#! Outputs: [] +#! +#! 
Invocation: exec +proc write_mint_note_storage + # Write P2ID storage items first (before prefix is consumed): [16..17] + # Write destination_id_suffix [16] + dup mem_store.MINT_NOTE_STORAGE_OUTPUT_NOTE_SUFFIX + # => [destination_id_suffix, destination_id_prefix] + + # Write destination_id_prefix [17] + dup.1 mem_store.MINT_NOTE_STORAGE_OUTPUT_NOTE_PREFIX + # => [destination_id_suffix, destination_id_prefix] + + drop + # => [destination_id_prefix] + + # Get the native amount from the pre-computed miden_claim_amount + mem_load.CLAIM_OUTPUT_NOTE_FAUCET_AMOUNT + # => [native_amount, destination_id_prefix] + + # Compute the note tag for the destination account (consumes prefix) + swap + # => [destination_id_prefix, native_amount] + + exec.note_tag::create_account_target + # => [dest_tag, native_amount] + + # Write tag to MINT note storage [0] + mem_store.MINT_NOTE_STORAGE_DEST_TAG + # => [native_amount] + + # Write amount to MINT note storage [1] + mem_store.MINT_NOTE_STORAGE_NATIVE_AMOUNT + # => [] + + # Write P2ID attachment fields (the P2ID note has no attachment) + # attachment_kind = NONE [2] + push.ATTACHMENT_KIND_NONE mem_store.MINT_NOTE_STORAGE_ATTACHMENT_KIND + # => [] + + # attachment_scheme = NONE [3] + push.P2ID_ATTACHMENT_SCHEME_NONE mem_store.MINT_NOTE_STORAGE_ATTACHMENT_SCHEME + # => [] + + # ATTACHMENT = empty word [4..7] + padw mem_storew_le.MINT_NOTE_STORAGE_ATTACHMENT dropw + # => [] + + # Write P2ID_SCRIPT_ROOT to MINT note storage [8..11] + procref.::miden::standards::notes::p2id::main + # => [P2ID_SCRIPT_ROOT] + + mem_storew_le.MINT_NOTE_STORAGE_OUTPUT_SCRIPT_ROOT dropw + # => [] + + # Write SERIAL_NUM (PROOF_DATA_KEY) to MINT note storage [12..15] + mem_loadw_be.CLAIM_PROOF_DATA_KEY_MEM_ADDR + # => [SERIAL_NUM] + + mem_storew_le.MINT_NOTE_STORAGE_OUTPUT_SERIAL_NUM dropw + # => [] +end + +#! Builds the MINT note recipient digest from the storage items already written to global memory. +#! +#! 
Uses the MINT note script root and PROOF_DATA_KEY as serial number, then calls +#! `note::build_recipient` with the storage pointer and item count. +#! +#! Inputs: [] +#! Outputs: [MINT_RECIPIENT] +#! +#! Invocation: exec +proc build_mint_recipient + # Get the MINT note script root + procref.::miden::standards::notes::mint::main + # => [MINT_SCRIPT_ROOT] + + # Generate a serial number for the MINT note (use PROOF_DATA_KEY) + padw mem_loadw_be.CLAIM_PROOF_DATA_KEY_MEM_ADDR + # => [MINT_SERIAL_NUM, MINT_SCRIPT_ROOT] + + # Build the MINT note recipient + push.MINT_NOTE_NUM_STORAGE_ITEMS + # => [num_storage_items, MINT_SERIAL_NUM, MINT_SCRIPT_ROOT] + + push.MINT_NOTE_STORAGE_MEM_ADDR_0 + # => [storage_ptr, num_storage_items, MINT_SERIAL_NUM, MINT_SCRIPT_ROOT] + + exec.note::build_recipient + # => [MINT_RECIPIENT] +end + + + +#! Creates the MINT output note and sets the NetworkAccountTarget attachment on it. +#! +#! Creates a public output note with no assets, and sets the attachment so only the +#! target faucet can consume the note. +#! +#! Inputs: [MINT_RECIPIENT, faucet_id_prefix, faucet_id_suffix] +#! Outputs: [] +#! +#! 
Invocation: exec +proc create_mint_note_with_attachment + # Create the MINT output note targeting the faucet + push.OUTPUT_NOTE_TYPE_PUBLIC + # => [note_type, MINT_RECIPIENT, faucet_id_prefix, faucet_id_suffix] + + # Set tag to DEFAULT + push.DEFAULT_TAG + # => [note_type, MINT_RECIPIENT, faucet_id_prefix, faucet_id_suffix] + + # Create the output note (no assets - MINT notes carry no assets) + exec.output_note::create + # => [note_idx, faucet_id_prefix, faucet_id_suffix] + + movdn.2 + # => [faucet_id_prefix, faucet_id_suffix, note_idx] + + # Set the attachment on the MINT note to target the faucet account + # NetworkAccountTarget attachment: targets the faucet so only it can consume the note + # network_account_target::new expects [prefix, suffix, exec_hint] + # and returns [attachment_scheme, attachment_kind, ATTACHMENT] + push.ALWAYS # exec_hint = ALWAYS + movdn.2 + # => [faucet_id_prefix, faucet_id_suffix, exec_hint, note_idx] + + exec.network_account_target::new + # => [attachment_scheme, attachment_kind, ATTACHMENT, note_idx] + + # Rearrange for set_attachment: [note_idx, attachment_scheme, attachment_kind, ATTACHMENT] + movup.6 + # => [note_idx, attachment_scheme, attachment_kind, ATTACHMENT(4)] + + exec.output_note::set_attachment + # => [] +end + +#! Computes the root of the SMT based on the provided Merkle path, leaf value and leaf index. +#! +#! Inputs: [LEAF_VALUE_LO, LEAF_VALUE_HI, merkle_path_ptr, leaf_idx] +#! Outputs: [ROOT_LO, ROOT_HI] +#! +#! Where: +#! - [LEAF_VALUE_LO, LEAF_VALUE_HI] is the leaf for the provided Merkle path. +#! - merkle_path_ptr is the pointer to the memory where the merkle path is stored. This path is +#! represented as 32 Keccak256Digest values (64 words). +#! - leaf_idx is the index of the provided leaf in the SMT. +#! - [ROOT_LO, ROOT_HI] is the calculated root. 
+@locals(8) # current hash +proc calculate_root( + leaf_value: DoubleWord, + merkle_path_ptr: MemoryAddress, + leaf_idx: u32 +) -> DoubleWord + # Local memory stores the current hash. It is initialized to the leaf value + loc_storew_le.CUR_HASH_LO_LOCAL dropw loc_storew_le.CUR_HASH_HI_LOCAL dropw + # => [merkle_path_ptr, leaf_idx] + + # Merkle path is guaranteed to contain 32 nodes + repeat.32 + # load the Merkle path node word-by-word in LE-felt order + padw dup.4 mem_loadw_le + # => [PATH_NODE_LO, merkle_path_ptr, leaf_idx] + padw dup.8 add.4 mem_loadw_le + swapw + # => [PATH_NODE_LO, PATH_NODE_HI, merkle_path_ptr, leaf_idx] + + # advance merkle_path_ptr by 8 (two words = 8 element addresses) + movup.8 add.8 movdn.8 + # => [PATH_NODE_LO, PATH_NODE_HI, merkle_path_ptr+8, leaf_idx] + + # determine whether the last `leaf_idx` bit is 1 (is `leaf_idx` odd) + dup.9 u32and.1 + # => [is_odd, PATH_NODE_LO, PATH_NODE_HI, merkle_path_ptr+8, leaf_idx] + + # load the hash respective to the current height from the local memory + padw loc_loadw_le.CUR_HASH_HI_LOCAL padw loc_loadw_le.CUR_HASH_LO_LOCAL + # => [CURR_HASH_LO, CURR_HASH_HI, is_odd, PATH_NODE_LO, PATH_NODE_HI, merkle_path_ptr, leaf_idx] + + # move the `is_odd` flag to the top of the stack + movup.8 + # => [is_odd, CURR_HASH_LO, CURR_HASH_HI, PATH_NODE_LO, PATH_NODE_HI, merkle_path_ptr, leaf_idx] + + # if is_odd flag equals 1 (`leaf_idx` is odd), change the order of the nodes on the stack + if.true + # rearrange the hashes: current position of the hash is odd, so it should be on the + # right + swapdw + # => [PATH_NODE_LO, PATH_NODE_HI, CURR_HASH_LO, CURR_HASH_HI, merkle_path_ptr, leaf_idx] + end + + # compute the next height hash + exec.keccak256::merge + # => [CURR_HASH_LO', CURR_HASH_HI', merkle_path_ptr, leaf_idx] + + # store the resulting hash to the local memory and drop the hash words + loc_storew_le.CUR_HASH_LO_LOCAL dropw + loc_storew_le.CUR_HASH_HI_LOCAL dropw + # => [merkle_path_ptr, leaf_idx] + + # update 
the `leaf_idx` (shift it right by 1 bit) + swap u32shr.1 swap + # => [merkle_path_ptr, leaf_idx>>1] + end + + # after all 32 hashes have been computed, the current hash stored in local memory represents + # the root of the SMT, which should be returned + drop drop + padw loc_loadw_le.CUR_HASH_HI_LOCAL padw loc_loadw_le.CUR_HASH_LO_LOCAL + # => [ROOT_LO, ROOT_HI] +end + +#! Updates the claimed global index (CGI) chain hash by recomputing it and storing into the +#! corresponding storage slot. +#! +#! The resulting hash is computed as a sequential hash of leaf value, global index, and previous +#! value of the CGI chain hash: +#! NEW_CGI_CHAIN_HASH[8] = Keccak256(OLD_CGI_CHAIN_HASH[8], Keccak256(GLOBAL_INDEX[8], LEAF_VALUE[8])) +#! +#! Inputs: [LEAF_VALUE[8]] +#! Outputs: [] +#! +#! Invocation: exec +proc update_cgi_chain_hash + # load the required CGI chain data values onto the stack + exec.load_cgi_chain_hash_data + # => [GLOBAL_INDEX[8], LEAF_VALUE[8], OLD_CGI_CHAIN_HASH[8]] + + # compute the new CGI chain hash + exec.keccak256::merge swapdw + # => [OLD_CGI_CHAIN_HASH[8], Keccak256(GLOBAL_INDEX, LEAF_VALUE)] + + exec.keccak256::merge + # => [NEW_CGI_CHAIN_HASH[8]] + + # store the new CGI chain hash + exec.store_cgi_chain_hash + # => [] +end + +#! Loads the old CGI chain hash, the leaf value, and the global index onto the stack as a +#! preparation for the new CGI chain hash computation. +#! +#! Inputs: [LEAF_VALUE[8]] +#! Outputs: [GLOBAL_INDEX[8], LEAF_VALUE[8], OLD_CGI_CHAIN_HASH[8]] +#! +#! 
Invocation: exec +proc load_cgi_chain_hash_data + # load the old CGI chain hash onto the stack + push.CGI_CHAIN_HASH_HI_SLOT_NAME[0..2] + exec.active_account::get_item + # => [OLD_CGI_CHAIN_HASH_HI, LEAF_VALUE[8]] + + push.CGI_CHAIN_HASH_LO_SLOT_NAME[0..2] + exec.active_account::get_item + # => [OLD_CGI_CHAIN_HASH[8], LEAF_VALUE[8]] + + # move the leaf value to the top of the stack + swapdw + # => [LEAF_VALUE[8], OLD_CGI_CHAIN_HASH[8]] + + # load the global index onto the stack + push.GLOBAL_INDEX_PTR exec.utils::mem_load_double_word + # => [GLOBAL_INDEX[8], LEAF_VALUE[8], OLD_CGI_CHAIN_HASH[8]] +end + +#! Stores the computed global index (CGI) chain hash into the corresponding storage slots. +#! +#! Inputs: [NEW_CGI_CHAIN_HASH_LO, NEW_CGI_CHAIN_HASH_HI] +#! Outputs: [] +#! +#! Invocation: exec +proc store_cgi_chain_hash + push.CGI_CHAIN_HASH_LO_SLOT_NAME[0..2] + exec.native_account::set_item dropw + # => [NEW_CGI_CHAIN_HASH_HI] + + push.CGI_CHAIN_HASH_HI_SLOT_NAME[0..2] + exec.native_account::set_item dropw + # => [] +end \ No newline at end of file diff --git a/crates/miden-agglayer/asm/agglayer/bridge/bridge_out.masm b/crates/miden-agglayer/asm/agglayer/bridge/bridge_out.masm new file mode 100644 index 0000000000..1e4755fa3e --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/bridge/bridge_out.masm @@ -0,0 +1,565 @@ +use miden::protocol::active_note +use miden::protocol::active_account +use miden::protocol::asset +use miden::protocol::native_account +use miden::protocol::note +use miden::protocol::tx +use miden::standards::data_structures::double_word_array +use miden::standards::attachments::network_account_target +use miden::standards::note_tag::DEFAULT_TAG +use miden::standards::note::execution_hint::ALWAYS +use miden::protocol::types::MemoryAddress +use miden::protocol::output_note +use miden::core::crypto::hashes::keccak256 +use miden::core::crypto::hashes::poseidon2 +use miden::core::word +use agglayer::common::utils +use agglayer::faucet -> 
agglayer_faucet +use agglayer::bridge::bridge_config +use agglayer::bridge::leaf_utils +use agglayer::bridge::mmr_frontier32_keccak +use agglayer::common::utils::EthereumAddressFormat + +# CONSTANTS +# ================================================================================================= + +# Storage slots +# ------------------------------------------------------------------------------------------------- + +# Storage slot constants for the LET (Local Exit Tree). +# The frontier is stored as a double-word array in a map slot. +# The root and num_leaves are stored in separate value slots. +const LET_FRONTIER_SLOT=word("agglayer::bridge::let_frontier") +const LET_ROOT_LO_SLOT=word("agglayer::bridge::let_root_lo") +const LET_ROOT_HI_SLOT=word("agglayer::bridge::let_root_hi") +const LET_NUM_LEAVES_SLOT=word("agglayer::bridge::let_num_leaves") + +# Global memory pointers +# ------------------------------------------------------------------------------------------------- + +const LEAF_DATA_START_PTR=44 +# Memory pointer for loading the LET (Local Exit Tree) frontier into memory. 
+# The memory layout at this address matches what append_and_update_frontier expects: +# [num_leaves, 0, 0, 0, [[FRONTIER_NODE_LO, FRONTIER_NODE_HI]; 32]] +const LET_FRONTIER_MEM_PTR=100 + +# Field offsets +# ------------------------------------------------------------------------------------------------- + +# Leaf data field offsets (relative to LEAF_DATA_START_PTR) +const LEAF_TYPE_OFFSET=0 +const ORIGIN_NETWORK_OFFSET=1 +const ORIGIN_TOKEN_ADDRESS_OFFSET=2 +const DESTINATION_NETWORK_OFFSET=7 +const DESTINATION_ADDRESS_OFFSET=8 +const AMOUNT_OFFSET=13 +const METADATA_HASH_OFFSET=21 +const PADDING_OFFSET=29 + +# Local memory offsets +# ------------------------------------------------------------------------------------------------- + +# bridge_out memory locals +const BRIDGE_OUT_BURN_ASSET_LOC=0 +const DESTINATION_ADDRESS_0_LOC=8 +const DESTINATION_ADDRESS_1_LOC=9 +const DESTINATION_ADDRESS_2_LOC=10 +const DESTINATION_ADDRESS_3_LOC=11 +const DESTINATION_ADDRESS_4_LOC=12 +const DESTINATION_NETWORK_LOC=13 + +# create_burn_note memory locals +const CREATE_BURN_NOTE_BURN_ASSET_LOC=0 +const ATTACHMENT_LOC=8 +const ATTACHMENT_SCHEME_LOC=12 +const ATTACHMENT_KIND_LOC=13 + +# Other constants +# ------------------------------------------------------------------------------------------------- + +const LEAF_TYPE_ASSET=0 +const PUBLIC_NOTE=1 +const BURN_NOTE_NUM_STORAGE_ITEMS=0 + +# PUBLIC INTERFACE +# ================================================================================================= + +#! Bridges an asset out via the AggLayer. +#! +#! This procedure handles the complete bridge-out operation: +#! 1. Validates the asset's faucet is registered in the bridge's faucet registry +#! 2. Queries the faucet for origin asset conversion data via FPI +#! 3. Builds the leaf data (origin token, destination, amount, metadata) +#! 4. Computes Keccak hash and adds it to the MMR frontier +#! 5. Creates a BURN note with the bridged out asset +#! +#! 
Inputs: [ASSET_KEY, ASSET_VALUE, dest_network_id, dest_address(5), pad(2)] +#! Outputs: [pad(16)] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset to be bridged out. +#! - ASSET_VALUE is the value of the asset to be bridged out. +#! - dest_network_id is the u32 destination network/chain ID. +#! - dest_address(5) are 5 u32 values representing a 20-byte Ethereum address. +#! +#! Invocation: call +@locals(14) +pub proc bridge_out + # => [ASSET_KEY, ASSET_VALUE, dest_network_id, dest_address(5), pad(2)] + + # Save ASSET to local memory for later BURN note creation + locaddr.BRIDGE_OUT_BURN_ASSET_LOC + exec.asset::store + # => [dest_network_id, dest_address(5), pad(10)] + + loc_store.DESTINATION_NETWORK_LOC + loc_store.DESTINATION_ADDRESS_0_LOC + loc_store.DESTINATION_ADDRESS_1_LOC + loc_store.DESTINATION_ADDRESS_2_LOC + loc_store.DESTINATION_ADDRESS_3_LOC + loc_store.DESTINATION_ADDRESS_4_LOC + # => [pad(16)] + + # --- 1. Validate faucet registration and convert asset via FPI --- + locaddr.BRIDGE_OUT_BURN_ASSET_LOC + exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, pad(16)] + + exec.convert_asset + # => [AMOUNT_U256(8), origin_addr(5), origin_network, pad(16)] + + # --- 2. 
Write all leaf data fields to memory (except metadata hash) --- + # Store scaled AMOUNT (8 felts) + push.LEAF_DATA_START_PTR push.AMOUNT_OFFSET add + movdn.8 + exec.utils::mem_store_double_word_unaligned + # => [origin_addr(5), origin_network, pad(16)] + + # Store origin_token_address and origin_network + push.LEAF_DATA_START_PTR push.ORIGIN_TOKEN_ADDRESS_OFFSET add + exec.write_address_to_memory + # => [origin_network, pad(16)] + + push.LEAF_DATA_START_PTR push.ORIGIN_NETWORK_OFFSET add + mem_store + # => [pad(16)] + + # Store leaf type + push.LEAF_TYPE_ASSET + exec.utils::swap_u32_bytes + push.LEAF_DATA_START_PTR push.LEAF_TYPE_OFFSET add + mem_store + # => [pad(16)] + + # Store destination_network + loc_load.DESTINATION_NETWORK_LOC + push.LEAF_DATA_START_PTR push.DESTINATION_NETWORK_OFFSET add + mem_store + # => [pad(16)] + + # Store destination_address + loc_load.DESTINATION_ADDRESS_4_LOC + loc_load.DESTINATION_ADDRESS_3_LOC + loc_load.DESTINATION_ADDRESS_2_LOC + loc_load.DESTINATION_ADDRESS_1_LOC + loc_load.DESTINATION_ADDRESS_0_LOC + push.LEAF_DATA_START_PTR push.DESTINATION_ADDRESS_OFFSET add + exec.write_address_to_memory + # => [pad(16)] + + # --- 3. 
Fetch metadata hash from the faucet via FPI and write to memory --- + procref.agglayer_faucet::get_metadata_hash + # => [PROC_MAST_ROOT, pad(16)] + + # Reload asset to extract faucet ID for the FPI call + locaddr.BRIDGE_OUT_BURN_ASSET_LOC + exec.asset::load + swapw dropw + # => [ASSET_KEY, PROC_MAST_ROOT, pad(16)] + # ASSET layout: [0, 0, faucet_id_suffix, faucet_id_prefix] + + # Extract faucet ID, drop padding and amount + drop drop + # => [faucet_id_suffix, faucet_id_prefix, PROC_MAST_ROOT, pad(16)] + + exec.tx::execute_foreign_procedure + # => [METADATA_HASH_LO, METADATA_HASH_HI, pad(8)] + + push.LEAF_DATA_START_PTR push.METADATA_HASH_OFFSET add + movdn.8 + # => [METADATA_HASH_LO, METADATA_HASH_HI, metadata_hash_ptr, pad(8)] + exec.utils::mem_store_double_word_unaligned + # => [pad(16)] + + # Explicitly zero the 3 padding felts after METADATA_HASH for + # leaf_utils::pack_leaf_data + push.0 + push.LEAF_DATA_START_PTR push.PADDING_OFFSET add + mem_store + + push.0 + push.LEAF_DATA_START_PTR push.PADDING_OFFSET add.1 add + mem_store + + push.0 + push.LEAF_DATA_START_PTR push.PADDING_OFFSET add.2 add + mem_store + # => [pad(16)] + + # --- 4. Compute leaf value and add to MMR frontier --- + push.LEAF_DATA_START_PTR + exec.add_leaf_bridge + # => [pad(16)] + + # --- 5. Create BURN output note for ASSET --- + locaddr.BRIDGE_OUT_BURN_ASSET_LOC + exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, pad(16)] + exec.create_burn_note + # => [pad(16)] +end + +# HELPER PROCEDURES +# ================================================================================================= + +#! Validates that a faucet is registered in the bridge's faucet registry, then performs +#! an FPI call to the faucet's `asset_to_origin_asset` procedure to obtain the scaled +#! amount, origin token address, and origin network. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [AMOUNT_U256[0](4), AMOUNT_U256[1](4), origin_addr(5), origin_network] +#! +#! Where: +#!
- ASSET_KEY is the vault key of the asset to be bridged out. +#! - ASSET_VALUE is the value of the asset to be bridged out. +#! - AMOUNT_U256: scaled amount as 8 u32 limbs (little-endian) +#! - origin_addr: origin token address (5 u32 felts) +#! - origin_network: origin network identifier +#! +#! Panics if: +#! - The faucet is not registered in the faucet registry. +#! - The FPI call to asset_to_origin_asset fails. +#! +#! Invocation: exec +proc convert_asset + # --- Step 1: Assert faucet is registered --- + # pad in preparation for FPI call + repeat.2 + padw padw swapdw + end + # => [ASSET_KEY, ASSET_VALUE, pad(16)] + swapw + exec.asset::fungible_value_into_amount + movdn.4 + # => [ASSET_KEY, amount, pad(16)] + + exec.asset::key_into_faucet_id + # => [faucet_id_suffix, faucet_id_prefix, amount, pad(16)] + + dup.1 dup.1 + exec.bridge_config::assert_faucet_registered + # => [faucet_id_suffix, faucet_id_prefix, amount, pad(16)] + + # --- Step 2: FPI to faucet's asset_to_origin_asset --- + + procref.agglayer_faucet::asset_to_origin_asset + # => [PROC_MAST_ROOT, faucet_id_suffix, faucet_id_prefix, amount, pad(16)] + + # Move faucet_id above PROC_MAST_ROOT + movup.5 movup.5 + # => [faucet_id_suffix, faucet_id_prefix, PROC_MAST_ROOT, amount, pad(15), pad(1)] + + exec.tx::execute_foreign_procedure + # => [AMOUNT_U256[0](4), AMOUNT_U256[1](4), origin_addr(5), origin_network, pad(2), pad(1)] + + # drop the 3 trailing padding elements + repeat.3 + movup.14 drop + end + # => [AMOUNT_U256[0](4), AMOUNT_U256[1](4), origin_addr(5), origin_network] +end + +#! Computes the leaf value from the leaf data in memory and adds it to the MMR frontier. +#! +#! Inputs: [leaf_data_start_ptr] +#! Outputs: [] +#! +#! Memory layout (starting at leaf_data_start_ptr): +#! [ +#! leafType[1], +#! originNetwork[1], +#! originTokenAddress[5], +#! destinationNetwork[1], +#! destinationAddress[5], +#! amount[8], +#! metadataHash[8], +#! padding[3], +#! ] +#! +#! 
Invocation: exec +proc add_leaf_bridge(leaf_data_start_ptr: MemoryAddress) + exec.leaf_utils::compute_leaf_value + # => [LEAF_VALUE_LO, LEAF_VALUE_HI] + + # Load the LET frontier from storage into memory at LET_FRONTIER_MEM_PTR + exec.load_let_frontier_to_memory + # => [LEAF_VALUE_LO, LEAF_VALUE_HI] + + # Push frontier pointer below the leaf value + push.LET_FRONTIER_MEM_PTR movdn.8 + # => [LEAF_VALUE_LO, LEAF_VALUE_HI, let_frontier_ptr] + + # Append the leaf to the frontier and compute the new root + exec.mmr_frontier32_keccak::append_and_update_frontier + # => [NEW_ROOT_LO, NEW_ROOT_HI, new_leaf_count] + + # Save the root and num_leaves to their value slots + exec.save_let_root_and_num_leaves + # => [] + + # Write the updated frontier from memory back to the map + exec.save_let_frontier_to_storage + # => [] +end + +#! Loads the LET (Local Exit Tree) frontier from account storage into memory. +#! +#! The num_leaves is read from its dedicated value slot, and the 32 frontier entries are read +#! from the LET map slot (double-word array, indices 0..31). The data is placed into memory at +#! LET_FRONTIER_MEM_PTR, matching the layout expected by append_and_update_frontier: +#! [num_leaves, 0, 0, 0, [[FRONTIER_NODE_LO, FRONTIER_NODE_HI]; 32]] +#! +#! Empty (uninitialized) map entries return zeros, which is the correct initial state for the +#! frontier when there are no leaves. +#! +#! Inputs: [] +#! Outputs: [] +#! +#! Invocation: exec +proc load_let_frontier_to_memory + # 1. Load num_leaves from its value slot + push.LET_NUM_LEAVES_SLOT[0..2] + exec.active_account::get_item + # => [num_leaves_word] + + push.LET_FRONTIER_MEM_PTR mem_storew_le dropw + # => [] + + # 2. 
Load 32 frontier double-word entries from the map via double_word_array::get + push.0 + # => [h=0] + + repeat.32 + # => [h] + + # Read frontier[h] as a double word from the map + dup push.LET_FRONTIER_SLOT[0..2] + exec.double_word_array::get + # => [VALUE_0, VALUE_1, h] + + # Compute memory address and store the double word + dup.8 mul.8 add.LET_FRONTIER_MEM_PTR add.4 movdn.8 + # => [VALUE_0, VALUE_1, mem_addr, h] + exec.utils::mem_store_double_word + dropw dropw drop + # => [h] + + add.1 + # => [h+1] + end + + drop + # => [] +end + +#! Saves the Local Exit Root and num_leaves to their dedicated value slots. +#! +#! Inputs: [NEW_ROOT_LO, NEW_ROOT_HI, new_leaf_count] +#! Outputs: [] +#! +#! Invocation: exec +proc save_let_root_and_num_leaves + # 1. Save root lo word to its value slot + push.LET_ROOT_LO_SLOT[0..2] + exec.native_account::set_item + dropw + # => [NEW_ROOT_HI, new_leaf_count] + + # 2. Save root hi word to its value slot + push.LET_ROOT_HI_SLOT[0..2] + exec.native_account::set_item + dropw + # => [new_leaf_count] + + # 3. Save new_leaf_count to its value slot as [new_leaf_count, 0, 0, 0] + push.0.0.0 movup.3 + # => [new_leaf_count, 0, 0, 0] + push.LET_NUM_LEAVES_SLOT[0..2] + exec.native_account::set_item + dropw + # => [] +end + +#! Writes the 32 frontier entries from memory back to the LET map slot. +#! +#! Each frontier entry is a double word (Keccak256 digest) stored at +#! LET_FRONTIER_MEM_PTR + 4 + h * 8, and is written to the map at double_word_array index h. +#! +#! Inputs: [] +#! Outputs: [] +#! +#! Invocation: exec +proc save_let_frontier_to_storage + push.0 + # => [h=0] + + repeat.32 + # => [h] + + # Load frontier[h] double word from memory + dup mul.8 add.LET_FRONTIER_MEM_PTR add.4 + exec.utils::mem_load_double_word + # => [VALUE_0, VALUE_1, h] + + # Write it back to the map at index h + dup.8 push.LET_FRONTIER_SLOT[0..2] + exec.double_word_array::set + dropw dropw + # => [h] + + add.1 + # => [h+1] + end + + drop + # => [] +end + +#! 
Writes an Ethereum address (5 u32 felts) to consecutive memory locations. +#! +#! Inputs: [mem_ptr, address(5)] +#! Outputs: [] +#! +#! Invocation: exec +proc write_address_to_memory(mem_ptr: MemoryAddress, address: EthereumAddressFormat) + dup movdn.6 mem_store movup.4 add.1 + # => [mem_ptr+1, address(4)] + + dup movdn.5 mem_store movup.3 add.1 + # => [mem_ptr+2, address(3)] + + dup movdn.4 mem_store movup.2 add.1 + # => [mem_ptr+3, address(2)] + + dup movdn.3 mem_store swap add.1 + # => [mem_ptr+4, address(1)] + + mem_store +end + +#! Computes the SERIAL_NUM of the outputted BURN note. +#! +#! The serial number is computed as hash(B2AGG_SERIAL_NUM, ASSET_KEY). +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [SERIAL_NUM] +#! +#! Where: +#! - ASSET_KEY is the vault key from which to compute the burn note serial number. +#! - SERIAL_NUM is the computed serial number for the BURN note. +#! +#! Invocation: exec +proc compute_burn_note_serial_num + exec.active_note::get_serial_number + # => [B2AGG_SERIAL_NUM, ASSET_KEY] + + exec.poseidon2::merge + # => [SERIAL_NUM] +end + +#! Creates a BURN note for the specified asset with a NetworkAccountTarget attachment. +#! +#! This procedure creates an output note that represents a burn operation for the given asset. +#! The note targets the faucet account via a NetworkAccountTarget attachment. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset to be burnt. +#! - ASSET_VALUE is the value of the asset to be burnt. +#! +#! 
Invocation: exec +@locals(14) +proc create_burn_note + swapw dupw.1 + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY] + + locaddr.CREATE_BURN_NOTE_BURN_ASSET_LOC + exec.asset::store + # => [ASSET_KEY] + + exec.asset::key_to_faucet_id + # => [faucet_id_suffix, faucet_id_prefix, ASSET_KEY] + + # Create NetworkAccountTarget attachment for the faucet + push.ALWAYS movdn.2 + # => [faucet_id_suffix, faucet_id_prefix, exec_hint, ASSET_KEY] + + exec.network_account_target::new + # => [attachment_scheme, attachment_kind, NOTE_ATTACHMENT, ASSET_KEY] + + # Save attachment data to locals + loc_store.ATTACHMENT_SCHEME_LOC + loc_store.ATTACHMENT_KIND_LOC + loc_storew_le.ATTACHMENT_LOC dropw + # => [ASSET_KEY] + + exec.compute_burn_note_serial_num + # => [SERIAL_NUM] + + procref.::miden::standards::notes::burn::main swapw + # => [SERIAL_NUM, SCRIPT_ROOT] + + push.BURN_NOTE_NUM_STORAGE_ITEMS push.0 + # => [storage_ptr, num_storage_items, SERIAL_NUM, SCRIPT_ROOT] + + exec.note::build_recipient + # => [RECIPIENT] + + push.PUBLIC_NOTE + push.DEFAULT_TAG + # => [tag, note_type, RECIPIENT] + + # pad the stack before the call invocation + push.0 movdn.6 push.0 movdn.6 padw padw swapdw + # => [tag, note_type, RECIPIENT, pad(10)] + + call.output_note::create + # => [note_idx, pad(15)] + + # duplicate note_idx: one for set_attachment, one for add_asset + dup + swapw loc_loadw_le.ATTACHMENT_LOC + # => [NOTE_ATTACHMENT, note_idx, note_idx, pad(11)] + + loc_load.ATTACHMENT_KIND_LOC + loc_load.ATTACHMENT_SCHEME_LOC + # => [scheme, kind, NOTE_ATTACHMENT, note_idx, note_idx, pad(11)] + + movup.6 + # => [note_idx, scheme, kind, NOTE_ATTACHMENT, note_idx, pad(11)] + + exec.output_note::set_attachment + # => [note_idx, pad(11)] + + locaddr.CREATE_BURN_NOTE_BURN_ASSET_LOC + exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(11)] + + exec.output_note::add_asset + # => [pad(11)] + + dropw dropw drop drop drop + # => [] +end diff --git 
a/crates/miden-agglayer/asm/agglayer/bridge/canonical_zeros.masm b/crates/miden-agglayer/asm/agglayer/bridge/canonical_zeros.masm new file mode 100644 index 0000000000..8ebc0ba1d8 --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/bridge/canonical_zeros.masm @@ -0,0 +1,142 @@ +# This file is generated by build.rs, do not modify + +# This file contains the canonical zeros for the Keccak hash function. +# Zero of height `n` (ZERO_N) is the root of the binary tree of height `n` with leaves equal zero. +# +# Since the Keccak hash is represented by eight u32 values, each constant consists of two Words. + +const ZERO_0_L = [0, 0, 0, 0] +const ZERO_0_R = [0, 0, 0, 0] + +const ZERO_1_L = [3056087725, 3453220726, 1151697986, 2532382527] +const ZERO_1_R = [2447652395, 2990541491, 3846789184, 3042949783] + +const ZERO_2_L = [1360642484, 2406448277, 4132056164, 1186125340] +const ZERO_2_R = [3704028736, 3993486975, 2661877378, 806175122] + +const ZERO_3_L = [2746866977, 1063027030, 3055947948, 836748766] +const ZERO_3_R = [3686444836, 2778422344, 2319049635, 2243606276] + +const ZERO_4_L = [3010037733, 4058651434, 1513564138, 224004420] +const ZERO_4_R = [3462706719, 3881358125, 2360852476, 1150525734] + +const ZERO_5_L = [3206459406, 1344794057, 3386889228, 523052921] +const ZERO_5_R = [2680951561, 2114802790, 293668224, 768598281] + +const ZERO_6_L = [3173153928, 1087590535, 1715252246, 756088757] +const ZERO_6_R = [4134788524, 283579568, 578821813, 1746508463] + +const ZERO_7_L = [1459738623, 4234379492, 91932979, 40140559] +const ZERO_7_R = [1818541875, 613780937, 3475749318, 2205136186] + +const ZERO_8_L = [1607231384, 2473269631, 2128798138, 611590243] +const ZERO_8_R = [4069577285, 1227307046, 3321779339, 2941712185] + +const ZERO_9_L = [3855940302, 3113795592, 4275626407, 4216691121] +const ZERO_9_R = [1972812290, 1903710296, 1154705673, 3763621903] + +const ZERO_10_L = [2134826233, 1356863200, 861991663, 3567589455] +const ZERO_10_R = [2182953470, 4112065289, 
774786966, 2781069751] + +const ZERO_11_L = [1228583416, 167150306, 866654147, 1838648827] +const ZERO_11_R = [1467765009, 2720076317, 4149924453, 2465787000] + +const ZERO_12_L = [3469119540, 3960096235, 2195882716, 270336915] +const ZERO_12_R = [4164671431, 79648606, 171349786, 2631517602] + +const ZERO_13_L = [3649232833, 326416580, 82830058, 3551827087] +const ZERO_13_R = [1944734805, 2047814617, 1895984889, 3152187846] + +const ZERO_14_L = [3618465628, 49531590, 3755895333, 3658789242] +const ZERO_14_R = [1894305546, 2762164692, 3598841737, 3435063385] + +const ZERO_15_L = [2681109466, 3055060558, 843132861, 3700193742] +const ZERO_15_R = [3790037114, 2574387782, 708101859, 3525744215] + +const ZERO_16_L = [266679079, 4207046226, 824943129, 4065390056] +const ZERO_16_R = [4194160956, 3981742412, 2718529082, 530120689] + +const ZERO_17_L = [3367359457, 3833704967, 3603315816, 1543068721] +const ZERO_17_R = [1099357850, 598998238, 650244466, 2062522595] + +const ZERO_18_L = [181284186, 3144187786, 2400147060, 746357617] +const ZERO_18_R = [4157324078, 2923625471, 1072797208, 2692314236] + +const ZERO_19_L = [3056102068, 4164965877, 1039549588, 1032730592] +const ZERO_19_R = [2665487122, 3986541574, 1491476508, 2691355510] + +const ZERO_20_L = [1167482566, 3062253412, 719184416, 4242360534] +const ZERO_20_R = [1535003327, 3478010394, 1732703975, 3803705507] + +const ZERO_21_L = [2290434548, 1168258541, 971767692, 4045815225] +const ZERO_21_R = [1001466509, 3853444828, 899251086, 3655320222] + +const ZERO_22_L = [3692469338, 418371072, 1866109879, 3411854989] +const ZERO_22_R = [946955861, 3934089079, 3698331664, 2011403911] + +const ZERO_23_L = [1078982733, 696388782, 2651248336, 2805567324] +const ZERO_23_R = [2053609922, 4234662665, 3168994683, 1390808632] + +const ZERO_24_L = [2502281165, 276516087, 4292988995, 1681176506] +const ZERO_24_R = [4220355468, 1910056709, 565969590, 4011431532] + +const ZERO_25_L = [3378167562, 1475191156, 2768897524, 1956437264] 
+const ZERO_25_R = [2066155765, 999806777, 3318538162, 2371989742] + +const ZERO_26_L = [74763704, 4030198639, 2385297319, 1678762243] +const ZERO_26_R = [2038831148, 1786802573, 3649628337, 3498569445] + +const ZERO_27_L = [1431735427, 3418759627, 1513828739, 3748991331] +const ZERO_27_R = [1916245748, 2165369292, 3360338824, 516194684] + +const ZERO_28_L = [3722718822, 3165837101, 2975955312, 79972070] +const ZERO_28_R = [3067898230, 2366459736, 1571753335, 787185022] + +const ZERO_29_L = [246581816, 1909551909, 3876094376, 2551087773] +const ZERO_29_R = [2215341298, 1244629930, 3146618532, 581144193] + +const ZERO_30_L = [1350312851, 1223587258, 2904706143, 1078065138] +const ZERO_30_R = [1787682571, 2128594844, 578217418, 903308566] + +const ZERO_31_L = [2340505732, 1648733876, 2660540036, 3759582231] +const ZERO_31_R = [2389186238, 4049365781, 1653344606, 2840985724] + +use ::agglayer::common::utils::mem_store_double_word + +#! Inputs: [zeros_ptr] +#! Outputs: [] +pub proc load_zeros_to_memory + push.ZERO_0_R.ZERO_0_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_1_R.ZERO_1_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_2_R.ZERO_2_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_3_R.ZERO_3_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_4_R.ZERO_4_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_5_R.ZERO_5_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_6_R.ZERO_6_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_7_R.ZERO_7_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_8_R.ZERO_8_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_9_R.ZERO_9_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_10_R.ZERO_10_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_11_R.ZERO_11_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_12_R.ZERO_12_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_13_R.ZERO_13_L exec.mem_store_double_word dropw dropw 
add.8 + push.ZERO_14_R.ZERO_14_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_15_R.ZERO_15_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_16_R.ZERO_16_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_17_R.ZERO_17_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_18_R.ZERO_18_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_19_R.ZERO_19_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_20_R.ZERO_20_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_21_R.ZERO_21_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_22_R.ZERO_22_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_23_R.ZERO_23_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_24_R.ZERO_24_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_25_R.ZERO_25_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_26_R.ZERO_26_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_27_R.ZERO_27_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_28_R.ZERO_28_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_29_R.ZERO_29_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_30_R.ZERO_30_L exec.mem_store_double_word dropw dropw add.8 + push.ZERO_31_R.ZERO_31_L exec.mem_store_double_word dropw dropw add.8 + drop +end diff --git a/crates/miden-agglayer/asm/agglayer/bridge/leaf_utils.masm b/crates/miden-agglayer/asm/agglayer/bridge/leaf_utils.masm new file mode 100644 index 0000000000..f9d697c66d --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/bridge/leaf_utils.masm @@ -0,0 +1,149 @@ +use miden::core::crypto::hashes::keccak256 +use miden::protocol::types::DoubleWord +use miden::protocol::types::MemoryAddress + +# CONSTANTS +# ================================================================================================= + +# the number of bytes in the leaf data to hash (matches Solidity's abi.encodePacked output) +const LEAF_DATA_BYTES = 113 + +# the local memory offset where we 
store the leaf data start pointer +const PACKING_START_PTR_LOCAL= 0 + +# the number of elements to pack (113 bytes = 29 elements, rounding up from 28.25) +const PACKED_DATA_NUM_ELEMENTS = 29 + +# PUBLIC INTERFACE +# ================================================================================================= + +#! Given a memory address where the unpacked leaf data starts, packs the leaf data in-place, and +#! computes the leaf value by hashing the packed bytes. +#! +#! Inputs: [LEAF_DATA_START_PTR] +#! Outputs: [LEAF_VALUE[8]] +#! +#! Invocation: exec +pub proc compute_leaf_value(leaf_data_start_ptr: MemoryAddress) -> DoubleWord + dup + # => [leaf_data_start_ptr, leaf_data_start_ptr] + exec.pack_leaf_data + # => [leaf_data_start_ptr] + + push.LEAF_DATA_BYTES swap + # => [start_ptr, byte_len] + + exec.keccak256::hash_bytes + # => [LEAF_VALUE[8]] +end + +#! Packs the raw leaf data by shifting left 3 bytes to match Solidity's abi.encodePacked format. +#! +#! The raw data has leafType occupying 4 bytes (as a u32 felt) but Solidity's abi.encodePacked +#! only uses 1 byte for uint8 leafType. This procedure shifts all data left by 3 bytes so that: +#! - Byte 0: leafType (1 byte) +#! - Bytes 1-4: originNetwork (4 bytes) +#! - etc. +#! +#! The Keccak precompile expects u32 values packed in little-endian byte order. +#! For each packed element, we drop the leading 3 bytes and rebuild the u32 so that +#! bytes [b0, b1, b2, b3] map to u32::from_le_bytes([b0, b1, b2, b3]). +#! With little-endian input limbs, the first byte comes from the MSB of `curr` and +#! the next three bytes come from the LSBs of `next`: +#! packed = ((curr >> 24) & 0xFF) +#! | (next & 0xFF) << 8 +#! | ((next >> 8) & 0xFF) << 16 +#! | ((next >> 16) & 0xFF) << 24 +#! +#! To help visualize the packing process, consider that each field element represents a 4-byte +#! value [u8; 4] (LE). +#! Memory before is: +#! ptr+0: 1 felt: [a, b, c, d] +#! ptr+1: 1 felt: [e, f, g, h] +#! 
ptr+2..6: 5 felts: [i, j, k, l, m, ...] +#! +#! Memory after: +#! ptr+0: 1 felt: [d, e, f, g] +#! ptr+1: 1 felt: [h, i, j, k] +#! ptr+2..6: 5 felts: [l, ...] +#! +#! Inputs: [leaf_data_start_ptr] +#! Outputs: [] +#! +#! Invocation: exec +@locals(1) # start_ptr +pub proc pack_leaf_data(leaf_data_start_ptr: MemoryAddress) + loc_store.PACKING_START_PTR_LOCAL + # => [] + + # initialize loop counter to 0 + push.0 + + # push initial condition (true) to enter the loop + push.1 + + # loop through elements from 0 to PACKED_DATA_NUM_ELEMENTS - 1 (28) + while.true + # => [counter] + + # compute source address: packing_start_ptr + counter + dup loc_load.PACKING_START_PTR_LOCAL add + # => [src_addr, counter] + + # load current element + mem_load + # => [curr_elem, counter] + + # extract MSB (upper 8 bits) which becomes the first little-endian byte + dup u32shr.24 + # => [curr_msb, curr_elem, counter] + + # compute source address for next element (counter + 1) + dup.2 loc_load.PACKING_START_PTR_LOCAL add add.1 + # => [next_src_addr, curr_msb, curr_elem, counter] + + # load next element + mem_load + # => [next_elem, curr_msb, curr_elem, counter] + + # keep curr_msb on top for combination + swap + # => [curr_msb, next_elem, curr_elem, counter] + + # add next byte0 (bits 0..7) into bits 8..15 + dup.1 u32and.0xFF u32shl.8 u32or + # => [partial, next_elem, curr_elem, counter] + + # add next byte1 (bits 8..15) into bits 16..23 + dup.1 u32shr.8 u32and.0xFF u32shl.16 u32or + # => [partial, next_elem, curr_elem, counter] + + # add next byte2 (bits 16..23) into bits 24..31 + dup.1 u32shr.16 u32and.0xFF u32shl.24 u32or + # => [packed_elem, next_elem, curr_elem, counter] + + # drop the next and current elements (no longer needed) + movdn.2 drop drop + # => [packed_elem, counter] + + # compute destination address: packing_start_ptr + counter (in-place) + dup.1 loc_load.PACKING_START_PTR_LOCAL add + # => [dest_addr, packed_elem, counter] + + # store packed element + mem_store + # => [counter]
+ + # increment counter + add.1 + # => [counter + 1] + + # check if we should continue (counter < PACKED_DATA_NUM_ELEMENTS) + dup push.PACKED_DATA_NUM_ELEMENTS lt + # => [should_continue, counter] + end + # => [counter] + + drop + # => [] +end diff --git a/crates/miden-agglayer/asm/bridge/mmr_frontier32_keccak.masm b/crates/miden-agglayer/asm/agglayer/bridge/mmr_frontier32_keccak.masm similarity index 94% rename from crates/miden-agglayer/asm/bridge/mmr_frontier32_keccak.masm rename to crates/miden-agglayer/asm/agglayer/bridge/mmr_frontier32_keccak.masm index b789847639..32eef7cec1 100644 --- a/crates/miden-agglayer/asm/bridge/mmr_frontier32_keccak.masm +++ b/crates/miden-agglayer/asm/agglayer/bridge/mmr_frontier32_keccak.masm @@ -1,7 +1,7 @@ use miden::core::crypto::hashes::keccak256 -use ::miden::agglayer::canonical_zeros::load_zeros_to_memory -use ::miden::agglayer::utils::mem_store_double_word -use ::miden::agglayer::utils::mem_load_double_word +use ::agglayer::bridge::canonical_zeros::load_zeros_to_memory +use ::agglayer::common::utils::mem_store_double_word +use ::agglayer::common::utils::mem_load_double_word # An MMR Frontier is a data structure based on an MMR, which combines some features of an MMR and an # SMT. 
@@ -136,8 +136,8 @@ const CANONICAL_ZEROES_LOCAL = 8 @locals(264) # new_leaf/curr_hash + canonical_zeros pub proc append_and_update_frontier # set CUR_HASH = NEW_LEAF and store to local memory - loc_storew_be.CUR_HASH_LO_LOCAL dropw - loc_storew_be.CUR_HASH_HI_LOCAL dropw + loc_storew_le.CUR_HASH_LO_LOCAL dropw + loc_storew_le.CUR_HASH_HI_LOCAL dropw # => [mmr_frontier_ptr] # get the current leaves number @@ -203,8 +203,8 @@ pub proc append_and_update_frontier # load the current hash from the local memory back to the stack # # in the first iteration the current hash will be equal to the new node - padw loc_loadw_be.CUR_HASH_HI_LOCAL - padw loc_loadw_be.CUR_HASH_LO_LOCAL + padw loc_loadw_le.CUR_HASH_HI_LOCAL + padw loc_loadw_le.CUR_HASH_LO_LOCAL swapdw # => [ # FRONTIER[curr_tree_height]_LO, FRONTIER[curr_tree_height]_HI, CUR_HASH_LO, @@ -217,16 +217,16 @@ pub proc append_and_update_frontier # => [CUR_HASH_LO', CUR_HASH_HI', curr_tree_height, num_leaves, mmr_frontier_ptr] # store the current hash of the next height back to the local memory - loc_storew_be.CUR_HASH_LO_LOCAL dropw - loc_storew_be.CUR_HASH_HI_LOCAL dropw + loc_storew_le.CUR_HASH_LO_LOCAL dropw + loc_storew_le.CUR_HASH_HI_LOCAL dropw # => [curr_tree_height, num_leaves, mmr_frontier_ptr] else # => [frontier[curr_tree_height]_ptr, curr_tree_height, num_leaves, mmr_frontier_ptr] # # this height wasn't "occupied" yet: store the current hash as the subtree root # (frontier node) at height `curr_tree_height` - padw loc_loadw_be.CUR_HASH_HI_LOCAL - padw loc_loadw_be.CUR_HASH_LO_LOCAL + padw loc_loadw_le.CUR_HASH_HI_LOCAL + padw loc_loadw_le.CUR_HASH_LO_LOCAL # => [ # CUR_HASH_LO, CUR_HASH_HI, frontier[curr_tree_height]_ptr, curr_tree_height, # num_leaves, mmr_frontier_ptr @@ -256,8 +256,8 @@ pub proc append_and_update_frontier # => [CUR_HASH_LO', CUR_HASH_HI', curr_tree_height, num_leaves, mmr_frontier_ptr] # store the current hash of the next height back to the local memory - loc_storew_be.CUR_HASH_LO_LOCAL 
dropw - loc_storew_be.CUR_HASH_HI_LOCAL dropw + loc_storew_le.CUR_HASH_LO_LOCAL dropw + loc_storew_le.CUR_HASH_HI_LOCAL dropw # => [curr_tree_height, num_leaves, mmr_frontier_ptr] end # => [curr_tree_height, num_leaves, mmr_frontier_ptr] @@ -292,8 +292,8 @@ pub proc append_and_update_frontier # compute. # load the final hash (which is also the root of the tree) - padw loc_loadw_be.CUR_HASH_HI_LOCAL - padw loc_loadw_be.CUR_HASH_LO_LOCAL + padw loc_loadw_le.CUR_HASH_HI_LOCAL + padw loc_loadw_le.CUR_HASH_LO_LOCAL # => [NEW_ROOT_LO, NEW_ROOT_HI, new_leaf_count] end diff --git a/crates/miden-agglayer/asm/agglayer/common/asset_conversion.masm b/crates/miden-agglayer/asm/agglayer/common/asset_conversion.masm new file mode 100644 index 0000000000..298d85eccc --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/common/asset_conversion.masm @@ -0,0 +1,393 @@ +use miden::core::math::u64 +use miden::core::word +use agglayer::common::utils +use ::miden::protocol::asset::FUNGIBLE_ASSET_MAX_AMOUNT + +# ERRORS +# ================================================================================================= + +const ERR_SCALE_AMOUNT_EXCEEDED_LIMIT="maximum scaling factor is 18" +const ERR_X_TOO_LARGE="the agglayer bridge in u256 value is larger than 2**128 and cannot be verifiably scaled to u64" +const ERR_UNDERFLOW="x < y*10^s (underflow detected)" +const ERR_REMAINDER_TOO_LARGE="remainder z must be < 10^s" +const ERR_Y_TOO_LARGE="y exceeds max fungible token amount" + +# CONSTANTS +# ================================================================================================= + +const MAX_SCALING_FACTOR=18 + +#! Calculate 10^scale where scale is a u8 exponent. +#! +#! Inputs: [scale] +#! Outputs: [10^scale] +#! +#! Where: +#! - scale is expected to be a small integer (0-18 typical for crypto decimals) +#! +#! Panics if: +#! 
- scale > 18 (overflow protection) +proc pow10 + u32assert.err=ERR_SCALE_AMOUNT_EXCEEDED_LIMIT + # => [scale] + + dup u32lte.MAX_SCALING_FACTOR assert.err=ERR_SCALE_AMOUNT_EXCEEDED_LIMIT + # => [scale] + + push.1 swap + # => [scale, result] + + dup neq.0 + # => [is_not_zero, scale, result] + + # Loop to calculate 10^scale + while.true + # => [scale, result] + + # result *= 10 + swap mul.10 swap + # => [scale, result*10] + + # scale -= 1 + sub.1 + # => [scale-1, result*10] + + dup neq.0 + # => [is_not_zero, scale-1, result*10] + end + # => [0, result] + + drop + # => [result] +end + +#! Convert an asset amount to a scaled U256 representation for bridging to Agglayer. +#! +#! This procedure is used to convert Miden asset amounts to EVM asset amounts. +#! It multiplies the input amount by 10^target_scale to adjust for decimal differences +#! between the current representation and the target chain's native decimals. +#! +#! The procedure first calculates 10^target_scale using the pow10 helper, then converts +#! both the amount and scale factor to U64 format, performs U64 multiplication, and +#! returns the result as 8 u32 limbs in little-endian order (U256 format). +#! +#! Inputs: [amount, target_scale] +#! Outputs: [[RESULT_U256[0], RESULT_U256[1]]] +#! +#! Where: +#! - amount: The asset amount to be converted (range: 0 to 2^63 - 2^31) +#! - target_scale: Exponent for scaling factor (10^target_scale) +#! - [RESULT_U256[0], RESULT_U256[1]]: U256 value as 8 u32 limbs in little-endian order +#! (least significant limb at the top of the stack, each limb stored in little-endian format) +#! +#! Examples: +#! - USDC: amount=1000000000, target_scale=0 → 1000000000 (no scaling) +#! - ETH: amount=1e10, target_scale=8 → 1e18 +#! +#! 
Invocation: exec +pub proc scale_native_amount_to_u256 + swap + # => [target_scale, amount] + + exec.pow10 + # => [scale, amount] + + u32split + # => [scale_lo, scale_hi, amount] + + movup.2 u32split + # => [amount_lo, amount_hi, scale_lo, scale_hi] + + # Perform U64 multiplication: amount * scale + # This is safe because both the scaling factor and amount are guaranteed to be smaller + # than 2^64, so we will never overflow a 256-bit value. + exec.u64::widening_mul + # => [res_lo, res_mid_lo, res_mid_hi, res_hi] + + # convert to U256 & little endian + padw swapw + # => [RESULT_U256[0], RESULT_U256[1]] +end + +#! Reverse the limbs and change the byte endianness of the result. +pub proc reverse_limbs_and_change_byte_endianness + # reverse the felts within each word + # [a, b, c, d, e, f, g, h] -> [h, g, f, e, d, c, b, a] + exec.word::reverse + swapw + exec.word::reverse + + # change the byte endianness of each felt + repeat.8 + exec.utils::swap_u32_bytes + movdn.7 + end + + # => [RESULT_U256[0], RESULT_U256[1]] +end + +#! Subtract two 128-bit integers (little-endian u32 limbs) and assert no underflow. +#! +#! Computes: +#! z = x - y +#! with the constraint: +#! y <= x +#! +#! Each 128-bit value is stored as 4 u32 limbs in little-endian order: +#! value = limb0 + limb1·2^32 + limb2·2^64 + limb3·2^96 +#! +#! The subtraction is performed in three steps using u64::overflowing_sub: +#! 1. (z0, z1) = (x0, x1) - (y0, y1) -> borrow_lo +#! 2. (t0, t1) = (x2, x3) - (y2, y3) -> underflow_hi_raw +#! 3. (z2, z3) = (t0, t1) - borrow_lo -> underflow_hi_borrow +#! +#! Inputs: [y0, y1, y2, y3, x0, x1, x2, x3] +#! Outputs: [z0, z1, z2, z3] +#! +#! Panics if: +#! - y > x (ERR_UNDERFLOW) +proc u128_sub_no_underflow + # Put x-word on top for easier access. 
+ swapw + # => [x0, x1, x2, x3, y0, y1, y2, y3] + + # ============================================================================================= + # Step 1: (z0, z1) = (x0, x1) - (y0, y1) + # u64::overflowing_sub expects [b_lo, b_hi, a_lo, a_hi], computes a - b + # ============================================================================================= + movup.5 + movup.5 + # => [y0, y1, x0, x1, x2, x3, y2, y3] + + exec.u64::overflowing_sub + # => [borrow_lo, z0, z1, x2, x3, y2, y3] + + # ============================================================================================= + # Step 2: (t0, t1) = (x2, x3) - (y2, y3) [raw, without borrow] + # Arrange as [y2, y3, x2, x3] + # ============================================================================================= + + movup.4 + movup.4 + # => [x2, x3, borrow_lo, z0, z1, y2, y3] + + movup.6 + movup.6 + # => [y2, y3, x2, x3, borrow_lo, z0, z1] + + exec.u64::overflowing_sub + # => [underflow_hi_raw, t0, t1, borrow_lo, z0, z1] + + # ============================================================================================= + # Step 3: (z2, z3) = (t0, t1) - borrow_lo + # Arrange as [borrow_lo, 0, t0, t1] + # ============================================================================================= + swap.3 + # => [borrow_lo, t0, t1, underflow_hi_raw, z0, z1] + + push.0 swap + # => [borrow_lo, 0, t0, t1, underflow_hi_raw, z0, z1] + + exec.u64::overflowing_sub + # => [underflow_hi_borrow, z2, z3, underflow_hi_raw, z0, z1] + + # Underflow iff either high-half step underflowed. + movup.3 or + assertz.err=ERR_UNDERFLOW + # => [z2, z3, z0, z1] + + # Rearrange to little-endian order. + movup.3 movup.3 + # => [z0, z1, z2, z3] +end + +#! Verify conversion from a U128 amount to a Miden native amount (Felt) +#! +#! Specification: +#! Verify that a provided y is the quotient of dividing x by 10^scale_exp: +#! y = floor(x / 10^scale_exp) +#! +#! This procedure does NOT perform division. 
It proves the quotient is correct by checking: +#! 1) y is within the allowed fungible token amount range +#! 2) y_scaled = y * 10^scale_exp (computed via scale_native_amount_to_u256) +#! 3) z = x - y_scaled (must not underflow, i.e. y_scaled <= x) +#! 4) z fits in 64 bits (upper 192 bits are zero) +#! 5) (z1, z0) < 10^scale_exp (remainder bound) +#! +#! These conditions prove: +#! x = y_scaled + z, with 0 <= z < 10^scale_exp +#! which uniquely implies: +#! y = floor(x / 10^scale_exp) +#! +#! Example (ETH -> Miden base 1e8): +#! - EVM amount: 100 ETH = 100 * 10^18 +#! - Miden amount: 100 ETH = 100 * 10^8 +#! - Therefore the scale-down factor is: +#! scale = 10^(18 - 8) = 10^10 +#! scale_exp = 10 +#! - Inputs/expected values: +#! x = 100 * 10^18 +#! y = floor(x / 10^10) = 100 * 10^8 +#! y_scaled = y * 10^10 = 100 * 10^18 +#! z = x - y_scaled = 0 +#! +#! Inputs: [x0, x1, x2, x3, scale_exp, y] +#! Where x is encoded as 4 u32 limbs in little-endian order. +#! (x0 is least significant limb) +#! Outputs: [] +#! +#! Where: +#! - x: The original amount as an unsigned 128-bit integer (U128). +#! It is provided on the operand stack as 4 little-endian u32 limbs: +#! x = x0 + x1·2^32 + x2·2^64 + x3·2^96 +#! - x0..x3: 32-bit limbs of x in little-endian order (x0 is least significant). +#! - scale_exp: The base-10 exponent used for scaling down (an integer in [0, 18]). +#! - y: The provided quotient (Miden native amount) as a Felt interpreted as an unsigned u64. +#! - y_scaled: The 256-bit value y * 10^scale_exp represented as 8 u32 limbs (big-endian). +#! - z: The remainder-like difference z = x - y_scaled (essentially dust that is lost in the +#! conversion due to precision differences). This verifier requires z < 10^scale_exp. +#! +#! Panics if: +#! - scale_exp > 18 (asserted in pow10 via scale_native_amount_to_u256) +#! - y exceeds the max fungible token amount +#! - x < y * 10^scale_exp (underflow) +#! - z does not fit in 64 bits +#! 
- (z1, z0) >= 10^scale_exp (remainder too large) +pub proc verify_u128_to_native_amount_conversion + # => [x0, x1, x2, x3, scale_exp, y] + + # ============================================================================================= + # Step 1: Enforce y <= MAX_FUNGIBLE_TOKEN_AMOUNT + # Constraint: y <= MAX_FUNGIBLE_TOKEN_AMOUNT + # ============================================================================================= + dup.5 + push.FUNGIBLE_ASSET_MAX_AMOUNT + lte + # => [is_lte, x0, x1, x2, x3, scale_exp, y] + + assert.err=ERR_Y_TOO_LARGE + # => [x0, x1, x2, x3, scale_exp, y] + + # ============================================================================================= + # Step 2: Compute y_scaled = y * 10^scale_exp + # + # Call: + # scale_native_amount_to_u256(amount=y, target_scale=scale_exp) + # ============================================================================================= + movup.4 + movup.5 + # => [y, scale_exp, x0, x1, x2, x3] + + # Duplicate scale_exp (needed later for remainder bound check in Step 4) + dup.1 swap + # => [y, scale_exp, scale_exp, x0, x1, x2, x3] + + exec.scale_native_amount_to_u256 + # => [y_scaled0..y_scaled7, scale_exp, x0, x1, x2, x3] + + # Drop the upper word as it's guaranteed to be zero since y_scaled will fit in 123 bits + # (amount: 63 bits, 10^target_scale: 60 bits). 
+ swapw dropw + # => [y_scaled0, y_scaled1, y_scaled2, y_scaled3, scale_exp, x0, x1, x2, x3] + + # ============================================================================================= + # Step 3: Compute z = x - y_scaled and prove no underflow + # z := x - y_scaled + # Constraint: y_scaled <= x + # ============================================================================================= + movup.4 movdn.8 + # => [y_scaled0, y_scaled1, y_scaled2, y_scaled3, x0, x1, x2, x3, scale_exp] + + exec.u128_sub_no_underflow + # => [z0, z1, z2, z3, scale_exp] + + # ============================================================================================= + # Step 4: Enforce z < 10^scale_exp (remainder bound) + # + # We compare z against 10^scale_exp using a u64 comparison on (z1, z0). + # To make that comparison complete, we must first prove z fits into 64 bits, i.e. z2 == z3 == 0. + # + # This is justified because scale_exp <= 18, so 10^scale_exp <= 10^18 < 2^60. + # Therefore any valid remainder z < 10^scale_exp must be < 2^60 and thus must have z2 == z3 == 0. + # ============================================================================================= + # u128_sub_no_underflow returns [z0, z1, z2, z3] in LE order. + # Assert z2 == z3 == 0 (remainder fits in 64 bits). + movup.3 + assertz.err=ERR_REMAINDER_TOO_LARGE # z3 == 0 + movup.2 + assertz.err=ERR_REMAINDER_TOO_LARGE # z2 == 0 + # => [z0, z1, scale_exp] + + movup.2 + exec.pow10 + # => [scale, z0, z1] + + u32split + # => [scale_lo, scale_hi, z0, z1] + + exec.u64::lt + # => [is_lt] + + assert.err=ERR_REMAINDER_TOO_LARGE + # => [] +end + +#! Verify conversion from an AggLayer U256 amount to a Miden native amount (Felt) +#! +#! This procedure first checks that the U256 value fits in 128 bits (x4..x7 == 0), +#! then delegates to verify_u128_to_native_amount_conversion for the actual verification. +#! +#! Specification: +#! Verify that a provided y is the quotient of dividing x by 10^scale_exp: +#! 
y = floor(x / 10^scale_exp) +#! +#! Example (ETH -> Miden base 1e8): +#! - EVM amount: 100 ETH = 100 * 10^18 +#! - Miden amount: 100 ETH = 100 * 10^8 +#! - Therefore the scale-down factor is: +#! scale = 10^(18 - 8) = 10^10 +#! scale_exp = 10 +#! - Inputs/expected values: +#! x = 100 * 10^18 +#! y = floor(x / 10^10) = 100 * 10^8 +#! y_scaled = y * 10^10 = 100 * 10^18 +#! z = x - y_scaled = 0 +#! +#! Inputs: [x7, x6, x5, x4, x3, x2, x1, x0, scale_exp, y] +#! Where x is encoded as 8 u32 limbs in big-endian order. +#! (x7 is most significant limb and is at the top of the stack) +#! Each limb is expected to contain little-endian bytes. +#! Outputs: [] +#! +#! Where: +#! - x: The original AggLayer amount as an unsigned 256-bit integer (U256). +#! It is provided on the operand stack as 8 big-endian u32 limbs: +#! x = x0 + x1·2^32 + x2·2^64 + x3·2^96 + x4·2^128 + x5·2^160 + x6·2^192 + x7·2^224 +#! - x0..x7: 32-bit limbs of x in big-endian order (x0 is least significant). +#! - scale_exp: The base-10 exponent used for scaling down (an integer in [0, 18]). +#! - y: The provided quotient (Miden native amount) as a Felt interpreted as an unsigned u64. +#! +#! Panics if: +#! - x does not fit into 128 bits (x4..x7 are not all zero) +#! - scale_exp > 18 (asserted in pow10 via scale_native_amount_to_u256) +#! - y exceeds the max fungible token amount +#! - x < y * 10^scale_exp (underflow) +#! - z does not fit in 64 bits +#! 
- (z1, z0) >= 10^scale_exp (remainder too large) +pub proc verify_u256_to_native_amount_conversion + + # reverse limbs and byte endianness + exec.reverse_limbs_and_change_byte_endianness + # => [x0, x1, x2, x3, x4, x5, x6, x7, scale_exp, y] + + # Enforce x < 2^128 + # Constraint: x4 == x5 == x6 == x7 == 0 + swapw + exec.word::eqz + assert.err=ERR_X_TOO_LARGE + # => [x0, x1, x2, x3, scale_exp, y] + + # Delegate to verify_u128_to_native_amount_conversion for the remaining verification + exec.verify_u128_to_native_amount_conversion + # => [] +end diff --git a/crates/miden-agglayer/asm/agglayer/common/eth_address.masm b/crates/miden-agglayer/asm/agglayer/common/eth_address.masm new file mode 100644 index 0000000000..5a038f07fe --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/common/eth_address.masm @@ -0,0 +1,109 @@ +use agglayer::common::utils +use miden::core::crypto::hashes::keccak256 +use miden::core::word + +# ERRORS +# ================================================================================================= + +const ERR_NOT_U32="address limb is not u32" +const ERR_MSB_NONZERO="most-significant 4 bytes must be zero for AccountId" +const ERR_FELT_OUT_OF_FIELD="combined u64 doesn't fit in field" + +# CONSTANTS +# ================================================================================================= + +const U32_MAX=4294967295 +const TWO_POW_32=4294967296 + +# PUBLIC INTERFACE +# ================================================================================================= + +#! Converts an Ethereum address format (address[5] type) back into an AccountId [prefix, suffix] type. +#! +#! The Ethereum address format is represented as 5 u32 limbs (20 bytes total) in *big-endian limb order* +#! (matching Solidity ABI encoding). Each limb encodes its 4 bytes in little-endian order: +#! limb0 = bytes[0..4] (most-significant 4 bytes, must be zero for AccountId) +#! limb1 = bytes[4..8] +#! limb2 = bytes[8..12] +#! limb3 = bytes[12..16] +#! 
limb4 = bytes[16..20] (least-significant 4 bytes) +#! +#! The most-significant 4 bytes must be zero for a valid AccountId conversion (be0 == 0). +#! The remaining 16 bytes are treated as two 8-byte words (conceptual u64 values): +#! prefix = (bswap(limb1) << 32) | bswap(limb2) # bytes[4..12] +#! suffix = (bswap(limb3) << 32) | bswap(limb4) # bytes[12..20] +#! +#! These 8-byte words are represented as field elements by packing two u32 limbs into a felt. +#! The packing is done via build_felt, which validates limbs are u32 and checks the packed value +#! did not reduce mod p (i.e. the word fits in the field). +#! +#! Inputs: [limb0, limb1, limb2, limb3, limb4] +#! Outputs: [suffix, prefix] +#! +#! Invocation: exec +pub proc to_account_id + # limb0 must be 0 (most-significant limb, on top) + assertz.err=ERR_MSB_NONZERO + # => [limb1, limb2, limb3, limb4] + + # Reorder for suffix = build_felt(limb4, limb3) where limb4=lo, limb3=hi + movup.2 movup.3 + # => [limb4, limb3, limb1, limb2] + + exec.build_felt + # => [suffix, limb1, limb2] + + # Reorder for prefix = build_felt(limb2, limb1) where limb2=lo, limb1=hi + swap movup.2 + # => [limb2, limb1, suffix] + + exec.build_felt + # => [prefix, suffix] + + swap + # => [suffix, prefix] +end + +# HELPER PROCEDURES +# ================================================================================================= + +#! Builds a single felt from two u32 limbs (little-endian limb order, little-endian bytes). +#! Conceptually, this is packing a 64-bit word (lo + (hi << 32)) into a field element. +#! This proc additionally verifies that the packed value did *not* reduce mod p by round-tripping +#! through u32split and comparing the limbs. +#! +#! Inputs: [lo, hi] +#! 
Outputs: [felt] +proc build_felt + # --- validate u32 limbs --- + u32assert2.err=ERR_NOT_U32 + # => [lo_be, hi_be] + + # limbs are little-endian bytes; swap to big-endian for building account ID + exec.utils::swap_u32_bytes + swap + exec.utils::swap_u32_bytes + swap + # => [lo, hi] + + # keep copies for the overflow check + dup.1 dup.1 + # => [lo_be, hi_be, lo_be, hi_be] + + # felt = (hi * 2^32) + lo + swap + push.TWO_POW_32 mul + add + # => [felt, lo_be, hi_be] + + # ensure no reduction mod p happened: + # split felt back into (lo, hi) and compare to inputs + dup u32split + # => [lo2, hi2, felt, lo_be, hi_be] + + movup.3 assert_eq.err=ERR_FELT_OUT_OF_FIELD + # => [hi2, felt, hi_be] + + movup.2 assert_eq.err=ERR_FELT_OUT_OF_FIELD + # => [felt] +end diff --git a/crates/miden-agglayer/asm/agglayer/common/utils.masm b/crates/miden-agglayer/asm/agglayer/common/utils.masm new file mode 100644 index 0000000000..ac4e5fbbae --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/common/utils.masm @@ -0,0 +1,95 @@ +# Utility module containing helper procedures for double word handling and byte manipulation. + +use miden::protocol::types::DoubleWord +use miden::protocol::types::MemoryAddress + +# TYPE ALIASES +# ================================================================================================= + +pub type EthereumAddressFormat = struct { a: felt, b: felt, c: felt, d: felt, e: felt } + +# BYTE MANIPULATION +# ================================================================================================= + +#! Swaps byte order in a u32 limb (LE <-> BE). +#! +#! Inputs: [value] +#! 
Outputs: [swapped] +pub proc swap_u32_bytes + # part0 = (value & 0xFF) << 24 + dup u32and.0xFF u32shl.24 + # => [part0, value] + + # part1 = ((value >> 8) & 0xFF) << 16 + dup.1 u32shr.8 u32and.0xFF u32shl.16 u32or + # => [part01, value] + + # part2 = ((value >> 16) & 0xFF) << 8 + dup.1 u32shr.16 u32and.0xFF u32shl.8 u32or + # => [part012, value] + + # part3 = (value >> 24) + dup.1 u32shr.24 u32or + # => [swapped, value] + + swap drop + # => [swapped] +end + +# DOUBLE WORD MEMORY OPERATIONS +# ================================================================================================= + +#! Stores two words to the provided global memory address. +#! +#! Inputs: [WORD_1, WORD_2, ptr] +#! Outputs: [WORD_1, WORD_2, ptr] +pub proc mem_store_double_word( + double_word_to_store: DoubleWord, + mem_ptr: MemoryAddress +) -> (DoubleWord, MemoryAddress) + dup.8 mem_storew_le swapw + # => [WORD_2, WORD_1, ptr] + + dup.8 add.4 mem_storew_le swapw + # => [WORD_1, WORD_2, ptr] +end + +#! Stores two words to the provided unaligned (not a multiple of 4) memory address. +#! +#! Inputs: [WORD_1, WORD_2, ptr] +#! Outputs: [] +pub proc mem_store_double_word_unaligned( + double_word_to_store: DoubleWord, + mem_ptr: MemoryAddress +) + # bring ptr to the top of the stack + dup.8 + # => [ptr, WORD_1, WORD_2, ptr] + + # store each element individually at consecutive addresses + mem_store dup.7 add.1 + mem_store dup.6 add.2 + mem_store dup.5 add.3 + mem_store + # => [WORD_2, ptr] + + dup.4 add.4 + mem_store dup.3 add.5 + mem_store dup.2 add.6 + mem_store dup.1 add.7 + mem_store + drop + # => [] +end + +#! Loads two words from the provided global memory address. +#! +#! Inputs: [ptr] +#! 
Outputs: [WORD_1, WORD_2] +pub proc mem_load_double_word(mem_ptr: MemoryAddress) -> DoubleWord + padw dup.4 add.4 mem_loadw_le + # => [WORD_2, ptr] + + padw movup.8 mem_loadw_le + # => [WORD_1, WORD_2] +end diff --git a/crates/miden-agglayer/asm/agglayer/faucet/mod.masm b/crates/miden-agglayer/asm/agglayer/faucet/mod.masm new file mode 100644 index 0000000000..913ebfa6ac --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/faucet/mod.masm @@ -0,0 +1,202 @@ +use miden::core::sys +use agglayer::common::utils +use agglayer::common::asset_conversion +use agglayer::common::eth_address +use miden::protocol::active_account +use miden::protocol::active_note +use miden::standards::faucets +use miden::standards::faucets::network_fungible + +# CONSTANTS +# ================================================================================================= + +# Storage slots for conversion metadata. +# Slot 1: [addr_felt0, addr_felt1, addr_felt2, addr_felt3] — first 4 felts of origin token address +const CONVERSION_INFO_1_SLOT = word("agglayer::faucet::conversion_info_1") +# Slot 2: [addr_felt4, origin_network, scale, 0] — remaining address felt + origin network + scale +const CONVERSION_INFO_2_SLOT = word("agglayer::faucet::conversion_info_2") + +# Storage slots for the pre-computed metadata hash (keccak256 of ABI-encoded token metadata). +# The 32-byte hash is split across two value slots, each holding 4 u32 felts. +const METADATA_HASH_LO_SLOT = word("agglayer::faucet::metadata_hash_lo") +const METADATA_HASH_HI_SLOT = word("agglayer::faucet::metadata_hash_hi") + +# PUBLIC INTERFACE +# ================================================================================================= + +#! Returns the origin token address (5 felts) from faucet conversion storage. +#! +#! Reads conversion_info_1 (first 4 felts of address) and conversion_info_2 (5th felt) +#! from storage. +#! +#! Inputs: [] +#! Outputs: [addr0, addr1, addr2, addr3, addr4] +#! +#! 
Invocation: exec +pub proc get_origin_token_address + # TODO(migration): this procedure name is the same as the one in the bridge account, but they have different functions. + push.CONVERSION_INFO_1_SLOT[0..2] + exec.active_account::get_item + # => [addr0, addr1, addr2, addr3] + + # Read slot 2: [addr4, origin_network, scale, 0] + push.CONVERSION_INFO_2_SLOT[0..2] + exec.active_account::get_item + # => [addr4, origin_network, scale, 0, addr0, addr1, addr2, addr3] + + # Keep only addr4, drop origin_network, scale, 0 + movdn.7 drop drop drop + # => [addr0, addr1, addr2, addr3, addr4] +end + +#! Returns the origin network identifier from faucet conversion storage. +#! +#! Inputs: [] +#! Outputs: [origin_network] +#! +#! Invocation: exec +pub proc get_origin_network + push.CONVERSION_INFO_2_SLOT[0..2] + exec.active_account::get_item + # => [addr4, origin_network, scale, 0] + + drop movdn.2 drop drop + # => [origin_network] +end + +#! Returns the scale factor from faucet conversion storage. +#! +#! Inputs: [] +#! Outputs: [scale] +#! +#! Invocation: exec +proc get_scale_inner + push.CONVERSION_INFO_2_SLOT[0..2] + exec.active_account::get_item + # => [addr4, origin_network, scale, 0] + + drop drop swap drop + # => [scale] +end + +#! Returns the pre-computed metadata hash (8 u32 felts) from faucet storage. +#! +#! The metadata hash is `keccak256(abi.encode(name, symbol, decimals))` and is stored +#! across two value slots (lo and hi, 4 felts each). +#! +#! Inputs: [pad(16)] +#! Outputs: [METADATA_HASH_LO(4), METADATA_HASH_HI(4), pad(8)] +#! +#! 
Invocation: call +pub proc get_metadata_hash + push.METADATA_HASH_LO_SLOT[0..2] + exec.active_account::get_item + # => [lo0, lo1, lo2, lo3, pad(16)] + + push.METADATA_HASH_HI_SLOT[0..2] + exec.active_account::get_item + # => [hi0, hi1, hi2, hi3, lo0, lo1, lo2, lo3, pad(16)] + + # Rearrange: move hi below lo + swapw + # => [lo0, lo1, lo2, lo3, hi0, hi1, hi2, hi3, pad(16)] + + # Drop 8 excess padding elements (24 -> 16) + swapdw dropw dropw + # => [METADATA_HASH_LO(4), METADATA_HASH_HI(4), pad(8)] +end + +#! Returns the scale factor from faucet conversion storage. +#! +#! Called via FPI from the bridge account. +#! +#! Inputs: [pad(16)] +#! Outputs: [scale, pad(15)] +#! +#! Invocation: call +pub proc get_scale + exec.get_scale_inner + # => [scale, pad(16)] + + swap drop + # => [scale, pad(15)] +end + +#! Converts a native Miden asset amount to origin asset data using the stored +#! conversion metadata (origin_token_address, origin_network, and scale). +#! +#! This procedure is intended to be called via FPI from the bridge account. +#! It reads the faucet's conversion metadata from storage, scales the native amount +#! to U256 format, and returns the result along with origin token address and network. +#! +#! Inputs: [amount, pad(15)] +#! Outputs: [AMOUNT_U256[0], AMOUNT_U256[1], addr0, addr1, addr2, addr3, addr4, origin_network, pad(2)] +#! +#! Where: +#! - amount: The native Miden asset amount +#! - AMOUNT_U256: The scaled amount as 8 u32 limbs (little-endian U256) +#! - addr0..addr4: Origin token address (5 felts, u32 limbs) +#! - origin_network: Origin network identifier +#! +#! 
Invocation: call +pub proc asset_to_origin_asset + # => [amount, pad(15)] + + # Step 1: Get scale from storage + exec.get_scale_inner + # => [scale, amount, pad(15)] + swap + # => [amount, scale, pad(15)] + + # Step 2: Scale amount to U256 + exec.asset_conversion::scale_native_amount_to_u256 + exec.asset_conversion::reverse_limbs_and_change_byte_endianness + # => [U256_LO, U256_HI, pad(15)] + + # Step 3: Get origin token address + exec.get_origin_token_address + # => [addr0, addr1, addr2, addr3, addr4, U256_LO, U256_HI, pad(15)] + + # Move address below the U256 amount + repeat.5 movdn.12 end + # => [U256_LO, U256_HI, addr0, addr1, addr2, addr3, addr4, pad(15)] + + # Step 4: Get origin network + exec.get_origin_network + exec.utils::swap_u32_bytes + # => [origin_network, U256_LO, U256_HI, addr0..addr4, pad(15)] + + # Move origin_network after the address fields + movdn.13 + # => [U256_LO, U256_HI, addr0, addr1, addr2, addr3, addr4, origin_network, pad(15)] + + exec.sys::truncate_stack +end + +#! Burns the fungible asset from the active note. +#! +#! This procedure retrieves the asset from the active note and burns it. The note must contain +#! exactly one asset, which must be a fungible asset issued by this faucet. +#! +#! Inputs: [pad(16)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! - the procedure is not called from a note context (active_note::get_assets will fail). +#! - the note does not contain exactly one asset. +#! - the transaction is executed against an account which is not a fungible asset faucet. +#! - the transaction is executed against a faucet which is not the origin of the specified asset. +#! - the amount about to be burned is greater than the outstanding supply of the asset. +#! +#! Invocation: call +pub use ::miden::standards::faucets::basic_fungible::burn + +#! Re-export the network fungible faucet's mint_and_send procedure. +#! +#! See `miden::standards::faucets::network_fungible::mint_and_send` for more details. +#! +#! 
Inputs: [amount, tag, note_type, RECIPIENT, pad(9)] +#! Outputs: [note_idx, pad(15)] +#! +#! Invocation: call +pub use ::miden::standards::faucets::network_fungible::mint_and_send diff --git a/crates/miden-agglayer/asm/bridge/agglayer_faucet.masm b/crates/miden-agglayer/asm/bridge/agglayer_faucet.masm deleted file mode 100644 index ddf0e4b99c..0000000000 --- a/crates/miden-agglayer/asm/bridge/agglayer_faucet.masm +++ /dev/null @@ -1,279 +0,0 @@ -use miden::agglayer::bridge_in -use miden::agglayer::asset_conversion -use miden::agglayer::eth_address -use miden::protocol::active_account -use miden::protocol::active_note -use miden::standards::faucets -use miden::protocol::note -use miden::protocol::tx -use miden::core::mem -use miden::core::word - - -# CONSTANTS -# ================================================================================================= - -# The slot in this component's storage layout where the bridge account ID is stored. -const BRIDGE_ID_SLOT = word("miden::agglayer::faucet") - -const PROOF_DATA_WORD_LEN = 134 -const LEAF_DATA_WORD_LEN = 8 -const OUTPUT_NOTE_DATA_WORD_LEN = 2 - -const PROOF_DATA_START_PTR = 0 -const LEAF_DATA_START_PTR = 536 -const OUTPUT_NOTE_DATA_START_PTR = 568 - -# Memory Addresses -const PROOF_DATA_KEY_MEM_ADDR = 700 -const LEAF_DATA_KEY_MEM_ADDR = 704 -const OUTPUT_NOTE_DATA_MEM_ADDR = 708 -const CLAIM_NOTE_DATA_MEM_ADDR = 712 - -const OUTPUT_NOTE_INPUTS_MEM_ADDR = 0 -const OUTPUT_NOTE_TAG_MEM_ADDR = 574 -const OUTPUT_NOTE_SERIAL_NUM_MEM_ADDR = 568 -const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_0 = 552 -const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_1 = 556 - -const DESTINATION_ADDRESS_0 = 547 -const DESTINATION_ADDRESS_1 = 548 -const DESTINATION_ADDRESS_2 = 549 -const DESTINATION_ADDRESS_3 = 550 -const DESTINATION_ADDRESS_4 = 551 - -# P2ID output note constants -const P2ID_NOTE_NUM_STORAGE_ITEMS = 2 -const OUTPUT_NOTE_TYPE_PUBLIC = 1 -const EXECUTION_HINT_ALWAYS = 1 -const OUTPUT_NOTE_AUX = 0 - -const 
P2ID_OUTPUT_NOTE_AMOUNT_MEM_PTR = 611 -# ERRORS -# ================================================================================================= - -const ERR_INVALID_CLAIM_PROOF = "invalid claim proof" - -#! Inputs: [LEAF_DATA_KEY, PROOF_DATA_KEY] -#! Outputs: [] -#! -#! Panics if: -#! - the bridge account ID is not properly configured in storage. -#! - the foreign procedure invocation fails. -#! - the claim proof validation fails. -#! -#! Invocation: exec -proc validate_claim - # get bridge_in::verify_leaf_bridge procedure MAST root - procref.bridge_in::verify_leaf_bridge - # => [BRIDGE_PROC_MAST_ROOT, LEAF_DATA_KEY, PROOF_DATA_KEY] - - push.BRIDGE_ID_SLOT[0..2] - # => [bridge_id_idx, BRIDGE_PROC_MAST_ROOT, LEAF_DATA_KEY, PROOF_DATA_KEY] - - # get bridge account ID - exec.active_account::get_item - # => [bridge_account_id_prefix, bridge_account_id_suffix, 0, 0, BRIDGE_PROC_MAST_ROOT, LEAF_DATA_KEY, PROOF_DATA_KEY] - - movup.2 drop movup.2 drop - # => [bridge_account_id_prefix, bridge_account_id_suffix, BRIDGE_PROC_MAST_ROOT, LEAF_DATA_KEY, PROOF_DATA_KEY] - - # call bridge_in::verify_leaf_bridge - exec.tx::execute_foreign_procedure - # => [] -end - -# Inputs: [] -# Outputs: [U256[0], U256[1]] -proc get_raw_claim_amount - padw mem_loadw_be.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_0 - padw mem_loadw_be.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_1 -end - -# Inputs: [U256[0], U256[1]] -# Outputs: [amount] -proc scale_down_amount - repeat.7 drop end -end - -# Inputs: [PROOF_DATA_KEY, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] -# Outputs: [] -proc batch_pipe_double_words - # 1) Verify PROOF_DATA_KEY - mem_storew_be.PROOF_DATA_KEY_MEM_ADDR - adv.push_mapval - # => [PROOF_DATA_KEY] - - push.PROOF_DATA_START_PTR push.PROOF_DATA_WORD_LEN - exec.mem::pipe_double_words_preimage_to_memory drop - - # 2) Verify LEAF_DATA_KEY - mem_storew_be.LEAF_DATA_KEY_MEM_ADDR - adv.push_mapval - # => [LEAF_DATA_KEY] - - push.LEAF_DATA_START_PTR push.LEAF_DATA_WORD_LEN - 
exec.mem::pipe_double_words_preimage_to_memory drop - - # 3) Verify OUTPUT_NOTE_DATA_KEY - mem_storew_be.OUTPUT_NOTE_DATA_MEM_ADDR - adv.push_mapval - # => [OUTPUT_NOTE_DATA_KEY] - - push.OUTPUT_NOTE_DATA_START_PTR push.OUTPUT_NOTE_DATA_WORD_LEN - exec.mem::pipe_double_words_preimage_to_memory drop -end - -#! Extracts the destination account ID as address[5] from memory. -#! -#! This procedure reads the destination address from the leaf data and converts it from -#! Ethereum address format to AccountId format (prefix, suffix). -#! -#! Inputs: [] -#! Outputs: [prefix, suffix] -#! -#! Invocation: exec -proc get_destination_account_id_data - mem_load.DESTINATION_ADDRESS_4 - mem_load.DESTINATION_ADDRESS_3 - mem_load.DESTINATION_ADDRESS_2 - mem_load.DESTINATION_ADDRESS_1 - mem_load.DESTINATION_ADDRESS_0 - # => [address[5]] - - exec.eth_address::to_account_id - # => [prefix, suffix] -end - -#! Builds a P2ID output note for the claim recipient. -#! -#! This procedure expects the claim data to be already written to memory via batch_pipe_double_words. -#! It reads the destination account ID, amount, and other note parameters from memory to construct -#! the output note. -#! -#! Inputs: [] -#! Outputs: [] -#! -#! Note: This procedure will be refactored in a follow-up to use leaf data to build the output note. 
-proc build_p2id_output_note - # Build P2ID output note - procref.::miden::standards::notes::p2id::main - # => [SCRIPT_ROOT] - - swapw mem_loadw_be.OUTPUT_NOTE_SERIAL_NUM_MEM_ADDR - # => [SERIAL_NUM, SCRIPT_ROOT] - - push.P2ID_NOTE_NUM_STORAGE_ITEMS - # => [note_num_storage_items, SERIAL_NUM, SCRIPT_ROOT] - - push.OUTPUT_NOTE_INPUTS_MEM_ADDR - # => [storage_ptr = 0, note_num_storage_items, SERIAL_NUM, SCRIPT_ROOT] - - exec.get_destination_account_id_data - # => [prefix, suffix] - - # Write destination account id into memory - mem_store.1 mem_store.0 - # => [] - - exec.note::build_recipient - # => [RECIPIENT] - - push.OUTPUT_NOTE_TYPE_PUBLIC - # => [note_type, RECIPIENT] - - mem_load.OUTPUT_NOTE_TAG_MEM_ADDR - # => [tag, RECIPIENT] - - exec.get_raw_claim_amount - # => [AMOUNT[1], AMOUNT[0], tag, note_type, RECIPIENT] - - # TODO: implement scale down logic; stubbed out for now - exec.asset_conversion::scale_u256_to_native_amount - # => [amount, tag, note_type, RECIPIENT] - - exec.faucets::distribute - # => [pad(16)] -end - -#! Validates a claim against the AggLayer bridge and mints the corresponding asset to the recipient. -#! -#! This procedure validates the rollup exit root Merkle Proof via FPI against the agglayer bridge, -#! and if validation passes, mints the asset and creates an output note for the recipient. -#! -#! TODO: Expand this description to cover the double-spend protection mechanism in detail. -#! Double-spend can be prevented in two ways: -#! 1) While it's possible to create two identical P2ID notes, only one can actually be consumed. -#! If the claim note is consumed twice, only one P2ID output note will be successfully consumed. -#! 2) We can have a mapping in the bridge or in the faucet that stores consumed claim proofs -#! as a hash -> bool value (similar to how it's done in the agglayer solidity contract). -#! -#! Inputs: [PROOF_DATA_KEY, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY, pad(4)] -#! Outputs: [pad(16)] -#! -#! Advice map: { -#! 
PROOF_DATA_KEY => [ -#! smtProofLocalExitRoot[256], // SMT proof for local exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) -#! smtProofRollupExitRoot[256], // SMT proof for rollup exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) -#! globalIndex[8], // Global index (8 felts, uint256 as 8 u32 felts) -#! mainnetExitRoot[8], // Mainnet exit root hash (8 felts, bytes32 as 8 u32 felts) -#! rollupExitRoot[8], // Rollup exit root hash (8 felts, bytes32 as 8 u32 felts) -#! ], -#! LEAF_DATA_KEY => [ -#! originNetwork[1], // Origin network identifier (1 felt, uint32) -#! originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts) -#! destinationNetwork[1], // Destination network identifier (1 felt, uint32) -#! destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts) -#! amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts) -#! metadata[8], // ABI encoded metadata (8 felts, fixed size) -#! ], -#! OUTPUT_NOTE_DATA_KEY => [ -#! output_p2id_serial_num[4], // P2ID note serial number (4 felts, Word) -#! agglayer_faucet_account_id[2], // Agglayer faucet account ID (2 felts, prefix and suffix) -#! output_note_tag[1], // P2ID output note tag -#! ] -#! } -#! -#! Panics if: -#! - the rollup exit root Merkle Proof validation via FPI fails. -#! - any of the validations in faucets::distribute fail. -#! -#! Invocation: call -pub proc claim - # Check AdviceMap values hash to keys & write CLAIM inputs & DATA_KEYs to global memory - exec.batch_pipe_double_words - # => [pad(16)] - - # VALIDATE CLAIM - mem_loadw_be.PROOF_DATA_KEY_MEM_ADDR - # => [PROOF_DATA_KEY, pad(12)] - swapw - mem_loadw_be.LEAF_DATA_KEY_MEM_ADDR - # => [LEAF_DATA_KEY, PROOF_DATA_KEY, pad(8)] - - # Errors on invalid proof - exec.validate_claim - # => [pad(16)] - - # Create P2ID output note - exec.build_p2id_output_note - # => [pad(16)] -end - -#! Burns the fungible asset from the active note. -#! -#! 
This procedure retrieves the asset from the active note and burns it. The note must contain -#! exactly one asset, which must be a fungible asset issued by this faucet. -#! -#! Inputs: [pad(16)] -#! Outputs: [pad(16)] -#! -#! Panics if: -#! - the procedure is not called from a note context (active_note::get_assets will fail). -#! - the note does not contain exactly one asset. -#! - the transaction is executed against an account which is not a fungible asset faucet. -#! - the transaction is executed against a faucet which is not the origin of the specified asset. -#! - the amount about to be burned is greater than the outstanding supply of the asset. -#! -#! Invocation: call -pub use ::miden::standards::faucets::basic_fungible::burn diff --git a/crates/miden-agglayer/asm/bridge/asset_conversion.masm b/crates/miden-agglayer/asm/bridge/asset_conversion.masm deleted file mode 100644 index e4f59f17d4..0000000000 --- a/crates/miden-agglayer/asm/bridge/asset_conversion.masm +++ /dev/null @@ -1,114 +0,0 @@ -use miden::core::math::u64 -use miden::core::word - -# CONSTANTS -# ================================================================================================= - -const MAX_SCALING_FACTOR=18 - -# ERRORS -# ================================================================================================= -const ERR_SCALE_AMOUNT_EXCEEDED_LIMIT="maximum scaling factor is 18" - -#! Calculate 10^scale where scale is a u8 exponent. -#! -#! Inputs: [scale] -#! Outputs: [10^scale] -#! -#! Where: -#! - scale is expected to be a small integer (0-18 typical for crypto decimals) -#! -#! Panics if: -#! 
- scale > 18 (overflow protection) -proc pow10 - u32assert.err=ERR_SCALE_AMOUNT_EXCEEDED_LIMIT - # => [scale] - - dup u32lte.MAX_SCALING_FACTOR assert.err=ERR_SCALE_AMOUNT_EXCEEDED_LIMIT - # => [scale] - - push.1 swap - # => [scale, result] - - dup neq.0 - # => [is_not_zero, scale, result] - - # Loop to calculate 10^scale - while.true - # => [scale, result] - - # result *= 10 - swap mul.10 swap - # => [scale, result*10] - - # scale -= 1 - sub.1 - # => [scale-1, result*10] - - dup neq.0 - # => [is_not_zero, scale-1, result*10] - end - # => [0, result] - - drop - # => [result] -end - -#! Convert an asset amount to a scaled U256 representation for bridging to Agglayer. -#! -#! This procedure is used to convert Miden asset amounts to EVM asset amounts. -#! It multiplies the input amount by 10^target_scale to adjust for decimal differences -#! between the current representation and the target chain's native decimals. -#! -#! The procedure first calculates 10^target_scale using the pow10 helper, then converts -#! both the amount and scale factor to U64 format, performs U64 multiplication, and -#! returns the result as 8 u32 limbs in little-endian order (U256 format). -#! -#! Inputs: [amount, target_scale] -#! Outputs: [[RESULT_U256[0], RESULT_U256[1]]] -#! -#! Where: -#! - amount: The asset amount to be converted (range: 0 to 2^63 - 2^31) -#! - target_scale: Exponent for scaling factor (10^target_scale) -#! - [RESULT_U256[0], RESULT_U256[1]]: U256 value as 8 u32 limbs in little-endian order -#! (least significant limb at the top of the stack, each limb stored in little-endian format) -#! -#! Examples: -#! - USDC: amount=1000000000, target_scale=0 → 1000000000 (no scaling) -#! - ETH: amount=1e10, target_scale=8 → 1e18 -#! -#! 
Invocation: exec -pub proc scale_native_amount_to_u256 - swap - # => [target_scale, amount] - - exec.pow10 - # => [scale, amount] - - u32split - # => [scale_hi, scale_lo, amount] - - movup.2 u32split - # => [amount_hi, amount_lo, scale_hi, scale_lo] - - # Perform U64 multiplication: amount * scale - # This is safe because both the scaling factor and amount are guaranteed to be smaller - # than 2^64, so we will never overflow a 256-bit value. - exec.u64::overflowing_mul - # => [res_hi, res_mid_hi, res_mid_lo, res_lo] - - exec.word::reverse - # => [res_lo, res_mid_lo, res_mid_hi, res_hi] - - # convert to U256 & little endian - padw swapw - # => [RESULT_U256[0], RESULT_U256[1]] -end - -#! TODO: implement scaling down -#! -#! Inputs: [U256[0], U256[1]] -#! Outputs: [amount] -pub proc scale_u256_to_native_amount - repeat.7 drop end -end diff --git a/crates/miden-agglayer/asm/bridge/bridge_in.masm b/crates/miden-agglayer/asm/bridge/bridge_in.masm deleted file mode 100644 index 734c2a2fd9..0000000000 --- a/crates/miden-agglayer/asm/bridge/bridge_in.masm +++ /dev/null @@ -1,211 +0,0 @@ -use miden::agglayer::crypto_utils -use miden::core::crypto::hashes::keccak256 -use miden::core::mem -use miden::protocol::active_account -use miden::protocol::native_account - -# ERRORS -# ================================================================================================= - -const ERR_BRIDGE_NOT_MAINNET = "bridge not mainnet" -const ERR_LEADING_BITS_NON_ZERO = "leading bits of global index must be zero" -const ERR_ROLLUP_INDEX_NON_ZERO = "rollup index must be zero for a mainnet deposit" -const ERR_SMT_ROOT_VERIFICATION_FAILED = "merkle proof verification failed: provided SMT root does not match the computed root" - -# CONSTANTS -# ================================================================================================= - -const PROOF_DATA_PTR = 0 -const PROOF_DATA_WORD_LEN = 134 -const SMT_PROOF_LOCAL_EXIT_ROOT_PTR = 0 # local SMT proof is first -const GLOBAL_INDEX_PTR 
= PROOF_DATA_PTR + 2 * 256 # 512 -const EXIT_ROOTS_PTR = GLOBAL_INDEX_PTR + 8 # 520 -const MAINNET_EXIT_ROOT_PTR = EXIT_ROOTS_PTR # it's the first exit root - -const GER_UPPER_STORAGE_SLOT=word("miden::agglayer::bridge::ger_upper") -const GER_LOWER_STORAGE_SLOT=word("miden::agglayer::bridge::ger_lower") - -# PUBLIC INTERFACE -# ================================================================================================= - -#! Updates the Global Exit Root (GER) in the bridge account storage. -#! -#! Inputs: [GER_LOWER[4], GER_UPPER[4], pad(8)] -#! Outputs: [pad(16)] -#! -#! Invocation: call -pub proc update_ger - push.GER_LOWER_STORAGE_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, GER_LOWER[4], GER_UPPER[4], pad(8)] - - exec.native_account::set_item - # => [OLD_VALUE, GER_UPPER[4], pad(8)] - - dropw - # => [GER_UPPER[4], pad(12)] - - push.GER_UPPER_STORAGE_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, GER_UPPER[4], pad(12)] - - exec.native_account::set_item - # => [OLD_VALUE, pad(12)] - - dropw - # => [pad(16)] -end - -#! Computes the leaf value and verifies it against the AggLayer bridge state. -#! -#! Verification is delegated to `verify_leaf` to mimic the AggLayer Solidity contracts. -#! The steps involved in verification are: -#! 1. Compute the GER from the mainnet and rollup exit roots. -#! 2. Assert that the computed GER is valid (exists in storage). -#! 3. Process the global index to determine if it's a mainnet or rollup deposit. -#! 4. Verify the Merkle proof for the provided leaf-index tuple against the computed GER. -#! -#! Inputs: -#! Operand stack: [LEAF_DATA_KEY, PROOF_DATA_KEY, pad(8)] -#! Advice map: { -#! PROOF_DATA_KEY => [ -#! smtProofLocalExitRoot[256], // SMT proof for local exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) -#! smtProofRollupExitRoot[256], // SMT proof for rollup exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) -#! globalIndex[8], // Global index (8 felts, uint256 as 8 u32 felts) -#! 
mainnetExitRoot[8], // Mainnet exit root hash (8 felts, bytes32 as 8 u32 felts) -#! rollupExitRoot[8], // Rollup exit root hash (8 felts, bytes32 as 8 u32 felts) -#! ], -#! LEAF_DATA_KEY => [ -#! leafType[1], // Leaf type (1 felt, uint32) -#! originNetwork[1], // Origin network identifier (1 felt, uint32) -#! originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts) -#! destinationNetwork[1], // Destination network identifier (1 felt, uint32) -#! destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts) -#! amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts) -#! metadata[8], // ABI encoded metadata (8 felts, fixed size) -#! EMPTY_WORD // padding -#! ], -#! } -#! -#! Outputs: [pad(16)] -#! -#! Panics if: -#! - the computed GER is invalid (never injected). -#! - the global index is invalid. -#! - the Merkle proof for the provided leaf-index tuple against the computed GER is invalid. -#! -#! Invocation: call -pub proc verify_leaf_bridge - # get the leaf value. We have all the necessary leaf data in the advice map - exec.crypto_utils::get_leaf_value - # => [LEAF_VALUE[8], PROOF_DATA_KEY, pad(8)] - - movupw.3 dropw - # => [LEAF_VALUE[8], PROOF_DATA_KEY, pad(4)] - - # delegate proof verification - exec.verify_leaf - # => [pad(16)] -end - -# HELPER PROCEDURES -# ================================================================================================= - -#! Asserts that the provided GER is valid (exists in storage). -#! -#! Inputs: [GER_ROOT[8]] -#! Outputs: [] -#! -#! Invocation: exec -proc assert_valid_ger - # TODO verify that GER is in storage - dropw dropw -end - -#! Assert the global index is valid for a mainnet deposit. -#! -#! Inputs: [GLOBAL_INDEX[8]] -#! Outputs: [leaf_index] -#! -#! Panics if: -#! - the leading bits of the global index are not zero. -#! - the mainnet flag is not 1. -#! - the rollup index is not 0. -#! -#! 
Invocation: exec -pub proc process_global_index_mainnet - # for v0.1, let's only implement the mainnet branch - # the top 191 bits of the global index are zero - repeat.5 assertz.err=ERR_LEADING_BITS_NON_ZERO end - - # the next element is a u32 mainnet flag bit - # enforce that this limb is one - # => [mainnet_flag, GLOBAL_INDEX[6..8], LEAF_VALUE[8]] - assert.err=ERR_BRIDGE_NOT_MAINNET - - # the next element is a u32 rollup index, must be zero for a mainnet deposit - assertz.err=ERR_ROLLUP_INDEX_NON_ZERO - - # finally, the leaf index = lowest 32 bits = last limb - # => [leaf_index] -end - -#! Verify leaf and checks that it has not been claimed. -#! -#! Inputs: -#! Operand stack: [LEAF_VALUE[8], PROOF_DATA_KEY] -#! -#! Outputs: [] -#! -#! Panics if: -#! - the computed GER is invalid (never injected). -#! - the global index is invalid. -#! - the Merkle proof for the provided leaf-index tuple against the computed GER is invalid. -#! -#! Invocation: exec -proc verify_leaf - movupw.2 - # load proof data from the advice map into memory - adv.push_mapval - # => [PROOF_DATA_KEY, LEAF_VALUE[8]] - - push.SMT_PROOF_LOCAL_EXIT_ROOT_PTR push.PROOF_DATA_WORD_LEN - exec.mem::pipe_preimage_to_memory drop - - # 1. compute GER from mainnet + rollup exit roots - push.EXIT_ROOTS_PTR - # => [exit_roots_ptr, LEAF_VALUE[8]] - exec.crypto_utils::compute_ger - # => [GER[8], LEAF_VALUE[8]] - - # 2. assert the GER is valid - exec.assert_valid_ger - # => [LEAF_VALUE[8]] - - # 3. 
load global index from memory - padw mem_loadw_le.GLOBAL_INDEX_PTR - padw push.GLOBAL_INDEX_PTR add.4 mem_loadw_le swapw - # => [GLOBAL_INDEX[8], LEAF_VALUE[8]] - - # to see if we're dealing with a deposit from mainnet or from a rollup, process the global index - # TODO currently only implemented for mainnet deposits (mainnet flag must be 1) - exec.process_global_index_mainnet - # => [leaf_index, LEAF_VALUE[8]] - - # load the pointers to the merkle proof and root, to pass to `verify_merkle_proof` - push.MAINNET_EXIT_ROOT_PTR swap - push.SMT_PROOF_LOCAL_EXIT_ROOT_PTR - # => [smt_proof_ptr, leaf_index, mainnet_exit_root_ptr, LEAF_VALUE[8]] - - # prepare the stack for the crypto_utils::verify_merkle_proof procedure: move the pointers deep - # in the stack - movdn.10 movdn.10 movdn.10 - # => [LEAF_VALUE[8], smt_proof_ptr, leaf_index, mainnet_exit_root_ptr] - - # delegate verification to crypto_utils::verify_merkle_proof - exec.crypto_utils::verify_merkle_proof - # => [verification_flag] - - # verify_merkle_proof procedure returns `true` if the verification was successful and `false` - # otherwise. Assert that `true` was returned. 
- assert.err=ERR_SMT_ROOT_VERIFICATION_FAILED - # => [] -end diff --git a/crates/miden-agglayer/asm/bridge/bridge_out.masm b/crates/miden-agglayer/asm/bridge/bridge_out.masm deleted file mode 100644 index a4eb3a9ba2..0000000000 --- a/crates/miden-agglayer/asm/bridge/bridge_out.masm +++ /dev/null @@ -1,159 +0,0 @@ -use miden::protocol::active_note -use miden::protocol::note -use miden::standards::note_tag -use miden::protocol::output_note -use miden::core::crypto::hashes::keccak256 -use miden::core::crypto::hashes::rpo256 -use miden::core::word -use miden::agglayer::local_exit_tree - -# CONSTANTS -# ================================================================================================= -const MMR_PTR=42 -const LOCAL_EXIT_TREE_SLOT=word("miden::agglayer::let") - -const PUBLIC_NOTE=1 -const BURN_NOTE_NUM_STORAGE_ITEMS=0 -const BURN_ASSET_MEM_PTR=24 - -#! Computes the SERIAL_NUM of the outputted BURN note. -#! -#! The serial number is computed as hash(B2AGG_SERIAL_NUM, ASSET). -#! -#! Inputs: [ASSET] -#! Outputs: [SERIAL_NUM] -#! -#! Where: -#! - ASSET is the asset for which to compute the burn note serial number. -#! - SERIAL_NUM is the computed serial number for the BURN note. -#! -#! Invocation: exec -proc compute_burn_note_serial_num - exec.active_note::get_serial_number - # => [B2AGG_SERIAL_NUM, ASSET] - - exec.rpo256::merge - # => [SERIAL_NUM] -end - -#! Creates a BURN note for the specified asset. -#! -#! This procedure creates an output note that represents a burn operation for the given asset. -#! The note is configured with the appropriate recipient, tag, and execution hint. -#! -#! Inputs: [ASSET] -#! Outputs: [] -#! -#! Where: -#! - ASSET is the asset to be burned. -#! -#! 
Invocation: exec -@locals(8) -proc create_burn_note - loc_storew_be.0 dupw - # => [ASSET, ASSET] - - movup.2 drop movup.2 drop - # => [faucet_id_prefix, faucet_id_suffix, ASSET] - - exec.note_tag::create_account_target - # => [network_faucet_tag, ASSET] - - loc_store.5 - # => [ASSET] - - exec.compute_burn_note_serial_num - # => [SERIAL_NUM] - - procref.::miden::standards::notes::burn::main swapw - # => [SERIAL_NUM, SCRIPT_ROOT] - - push.BURN_NOTE_NUM_STORAGE_ITEMS push.0 - # => [storage_ptr, num_storage_items, SERIAL_NUM, SCRIPT_ROOT] - - exec.note::build_recipient - # => [RECIPIENT] - - push.PUBLIC_NOTE - loc_load.5 - # => [tag, note_type, RECIPIENT] - - call.output_note::create - # => [note_idx] - - movdn.4 loc_loadw_be.0 - # => [ASSET, note_idx] - - exec.output_note::add_asset - # => [] -end - -#! Bridges an asset out via the AggLayer -#! -#! This procedure handles the complete bridge-out operation, including: -#! - Converting asset data to u32 format -#! - Computing Keccak hash of the data -#! - Adding the hash to the MMR frontier -#! - Storing the updated MMR root in account storage -#! - Creating a BURN note with the bridged out asset -#! -#! Inputs: [ASSET, dest_network, dest_address(5)] -#! Outputs: [] -#! -#! Where: -#! - ASSET is the asset to be bridged out. -#! - dest_network is the u32 destination network/chain ID. -#! - dest_address(5) are 5 u32 values representing a 20-byte Ethereum address. -#! -#! 
Invocation: call -pub proc bridge_out - mem_storew_be.BURN_ASSET_MEM_PTR - # => [ASSET, dest_network, dest_address(5)] - - # @dev TODO: Look up asset faucet id in asset registry - # -> return scaling factor - - # @dev TODO: Convert ASSET amount to EVM amount using scaling factor - # -> return amount from here: https://github.com/0xMiden/protocol/pull/2141 - - # Converting SCALED_ASSET, dest_network, dest_address(5) to u32 representation - # in preparation for keccak256 hashing - - # keccak256 inputs: - # => [ASSET, dest_network, dest_address(5)] - # TODO we should convert Miden->Ethereum asset values, incl. amount conversion etc. - - # TODO: make building bridge message a separate procedure - # TODO: match Agglayer addLeafBridge logic - # TODO: convert Miden asset amount to Ethereum amount - # Store ASSET as u32 limbs in memory starting at address 0 - push.0 movdn.4 exec.word::store_word_u32s_le - # => [dest_network, dest_address(5)] - - # Store [dest_network, dest_address[0..3]] as u32 limbs in memory starting at address 8 - push.8 movdn.4 exec.word::store_word_u32s_le - # => [dest_address(2), 0, 0] - - # Store [dest_address[3..5], 0, 0] as u32 limbs in memory starting at address 16 - push.16 movdn.4 exec.word::store_word_u32s_le - # => [] - - # 1 u32 = 4 bytes - # 10 u32 values = 40 bytes - push.40 push.0 - # => [ptr, len_bytes] - - exec.keccak256::hash_bytes - # => [DIGEST_U32[8]] - - # adding DIGEST_U32 double word leaf to mmr frontier - exec.local_exit_tree::add_asset_message - # => [] - - # creating BURN output note for ASSET - mem_loadw_be.BURN_ASSET_MEM_PTR - # => [ASSET] - - exec.create_burn_note - # => [] -end diff --git a/crates/miden-agglayer/asm/bridge/canonical_zeros.masm b/crates/miden-agglayer/asm/bridge/canonical_zeros.masm deleted file mode 100644 index e693c4fa16..0000000000 --- a/crates/miden-agglayer/asm/bridge/canonical_zeros.masm +++ /dev/null @@ -1,142 +0,0 @@ -# This file is generated by build.rs, do not modify - -# This file contains the 
canonical zeros for the Keccak hash function. -# Zero of height `n` (ZERO_N) is the root of the binary tree of height `n` with leaves equal zero. -# -# Since the Keccak hash is represented by eight u32 values, each constant consists of two Words. - -const ZERO_0_L = [0, 0, 0, 0] -const ZERO_0_R = [0, 0, 0, 0] - -const ZERO_1_L = [3042949783, 3846789184, 2990541491, 2447652395] -const ZERO_1_R = [2532382527, 1151697986, 3453220726, 3056087725] - -const ZERO_2_L = [806175122, 2661877378, 3993486975, 3704028736] -const ZERO_2_R = [1186125340, 4132056164, 2406448277, 1360642484] - -const ZERO_3_L = [2243606276, 2319049635, 2778422344, 3686444836] -const ZERO_3_R = [836748766, 3055947948, 1063027030, 2746866977] - -const ZERO_4_L = [1150525734, 2360852476, 3881358125, 3462706719] -const ZERO_4_R = [224004420, 1513564138, 4058651434, 3010037733] - -const ZERO_5_L = [768598281, 293668224, 2114802790, 2680951561] -const ZERO_5_R = [523052921, 3386889228, 1344794057, 3206459406] - -const ZERO_6_L = [1746508463, 578821813, 283579568, 4134788524] -const ZERO_6_R = [756088757, 1715252246, 1087590535, 3173153928] - -const ZERO_7_L = [2205136186, 3475749318, 613780937, 1818541875] -const ZERO_7_R = [40140559, 91932979, 4234379492, 1459738623] - -const ZERO_8_L = [2941712185, 3321779339, 1227307046, 4069577285] -const ZERO_8_R = [611590243, 2128798138, 2473269631, 1607231384] - -const ZERO_9_L = [3763621903, 1154705673, 1903710296, 1972812290] -const ZERO_9_R = [4216691121, 4275626407, 3113795592, 3855940302] - -const ZERO_10_L = [2781069751, 774786966, 4112065289, 2182953470] -const ZERO_10_R = [3567589455, 861991663, 1356863200, 2134826233] - -const ZERO_11_L = [2465787000, 4149924453, 2720076317, 1467765009] -const ZERO_11_R = [1838648827, 866654147, 167150306, 1228583416] - -const ZERO_12_L = [2631517602, 171349786, 79648606, 4164671431] -const ZERO_12_R = [270336915, 2195882716, 3960096235, 3469119540] - -const ZERO_13_L = [3152187846, 1895984889, 2047814617, 1944734805] 
-const ZERO_13_R = [3551827087, 82830058, 326416580, 3649232833] - -const ZERO_14_L = [3435063385, 3598841737, 2762164692, 1894305546] -const ZERO_14_R = [3658789242, 3755895333, 49531590, 3618465628] - -const ZERO_15_L = [3525744215, 708101859, 2574387782, 3790037114] -const ZERO_15_R = [3700193742, 843132861, 3055060558, 2681109466] - -const ZERO_16_L = [530120689, 2718529082, 3981742412, 4194160956] -const ZERO_16_R = [4065390056, 824943129, 4207046226, 266679079] - -const ZERO_17_L = [2062522595, 650244466, 598998238, 1099357850] -const ZERO_17_R = [1543068721, 3603315816, 3833704967, 3367359457] - -const ZERO_18_L = [2692314236, 1072797208, 2923625471, 4157324078] -const ZERO_18_R = [746357617, 2400147060, 3144187786, 181284186] - -const ZERO_19_L = [2691355510, 1491476508, 3986541574, 2665487122] -const ZERO_19_R = [1032730592, 1039549588, 4164965877, 3056102068] - -const ZERO_20_L = [3803705507, 1732703975, 3478010394, 1535003327] -const ZERO_20_R = [4242360534, 719184416, 3062253412, 1167482566] - -const ZERO_21_L = [3655320222, 899251086, 3853444828, 1001466509] -const ZERO_21_R = [4045815225, 971767692, 1168258541, 2290434548] - -const ZERO_22_L = [2011403911, 3698331664, 3934089079, 946955861] -const ZERO_22_R = [3411854989, 1866109879, 418371072, 3692469338] - -const ZERO_23_L = [1390808632, 3168994683, 4234662665, 2053609922] -const ZERO_23_R = [2805567324, 2651248336, 696388782, 1078982733] - -const ZERO_24_L = [4011431532, 565969590, 1910056709, 4220355468] -const ZERO_24_R = [1681176506, 4292988995, 276516087, 2502281165] - -const ZERO_25_L = [2371989742, 3318538162, 999806777, 2066155765] -const ZERO_25_R = [1956437264, 2768897524, 1475191156, 3378167562] - -const ZERO_26_L = [3498569445, 3649628337, 1786802573, 2038831148] -const ZERO_26_R = [1678762243, 2385297319, 4030198639, 74763704] - -const ZERO_27_L = [516194684, 3360338824, 2165369292, 1916245748] -const ZERO_27_R = [3748991331, 1513828739, 3418759627, 1431735427] - -const ZERO_28_L = 
[787185022, 1571753335, 2366459736, 3067898230] -const ZERO_28_R = [79972070, 2975955312, 3165837101, 3722718822] - -const ZERO_29_L = [581144193, 3146618532, 1244629930, 2215341298] -const ZERO_29_R = [2551087773, 3876094376, 1909551909, 246581816] - -const ZERO_30_L = [903308566, 578217418, 2128594844, 1787682571] -const ZERO_30_R = [1078065138, 2904706143, 1223587258, 1350312851] - -const ZERO_31_L = [2840985724, 1653344606, 4049365781, 2389186238] -const ZERO_31_R = [3759582231, 2660540036, 1648733876, 2340505732] - -use ::miden::agglayer::mmr_frontier32_keccak::mem_store_double_word - -#! Inputs: [zeros_ptr] -#! Outputs: [] -pub proc load_zeros_to_memory - push.ZERO_0_L.ZERO_0_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_1_L.ZERO_1_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_2_L.ZERO_2_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_3_L.ZERO_3_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_4_L.ZERO_4_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_5_L.ZERO_5_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_6_L.ZERO_6_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_7_L.ZERO_7_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_8_L.ZERO_8_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_9_L.ZERO_9_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_10_L.ZERO_10_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_11_L.ZERO_11_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_12_L.ZERO_12_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_13_L.ZERO_13_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_14_L.ZERO_14_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_15_L.ZERO_15_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_16_L.ZERO_16_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_17_L.ZERO_17_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_18_L.ZERO_18_R exec.mem_store_double_word 
dropw dropw add.8 - push.ZERO_19_L.ZERO_19_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_20_L.ZERO_20_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_21_L.ZERO_21_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_22_L.ZERO_22_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_23_L.ZERO_23_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_24_L.ZERO_24_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_25_L.ZERO_25_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_26_L.ZERO_26_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_27_L.ZERO_27_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_28_L.ZERO_28_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_29_L.ZERO_29_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_30_L.ZERO_30_R exec.mem_store_double_word dropw dropw add.8 - push.ZERO_31_L.ZERO_31_R exec.mem_store_double_word dropw dropw add.8 - drop -end diff --git a/crates/miden-agglayer/asm/bridge/crypto_utils.masm b/crates/miden-agglayer/asm/bridge/crypto_utils.masm deleted file mode 100644 index 4a9534882d..0000000000 --- a/crates/miden-agglayer/asm/bridge/crypto_utils.masm +++ /dev/null @@ -1,197 +0,0 @@ -use miden::core::crypto::hashes::keccak256 -use miden::core::word -use miden::agglayer::utils -use miden::core::mem - -# TYPE ALIASES -# ================================================================================================= - -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } -type DoubleWord = struct { word_lo: BeWord, word_hi: BeWord } -type MemoryAddress = u32 - -# CONSTANTS -# ================================================================================================= - -const LEAF_DATA_BYTES = 113 -const LEAF_DATA_NUM_WORDS = 8 -const LEAF_DATA_START_PTR = 0 - -# The offset of the first half of the current Keccak256 hash value in the local memory of the -# `calculate_root` procedure. 
-const CUR_HASH_LO_LOCAL = 0 - -# The offset of the second half of the current Keccak256 hash value in the local memory of the -# `calculate_root` procedure. -const CUR_HASH_HI_LOCAL = 4 - -# PUBLIC INTERFACE -# ================================================================================================= - -#! Given the leaf data key returns the leaf value. -#! -#! Inputs: -#! Operand stack: [LEAF_DATA_KEY] -#! Advice map: { -#! LEAF_DATA_KEY => [ -#! originNetwork[1], // Origin network identifier (1 felt, uint32) -#! originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts) -#! destinationNetwork[1], // Destination network identifier (1 felt, uint32) -#! destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts) -#! amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts) -#! metadata[8], // ABI encoded metadata (8 felts, fixed size) -#! ], -#! } -#! Outputs: [LEAF_VALUE[8]] -#! -#! Invocation: exec -pub proc get_leaf_value(leaf_data_key: BeWord) -> DoubleWord - adv.push_mapval - # => [LEAF_DATA_KEY] - - push.LEAF_DATA_START_PTR push.LEAF_DATA_NUM_WORDS - exec.mem::pipe_preimage_to_memory drop - # => [] - - push.LEAF_DATA_BYTES push.LEAF_DATA_START_PTR - # => [start_ptr, byte_len] - - exec.keccak256::hash_bytes - # => [LEAF_VALUE[8]] -end - -#! Computes the Global Exit Tree (GET) root from the mainnet and rollup exit roots. -#! -#! The mainnet exit root is expected at `exit_roots_ptr` and -#! the rollup exit root is expected at `exit_roots_ptr + 8`. -#! -#! Inputs: [exit_roots_ptr] -#! Outputs: [GER_ROOT[8]] -#! -#! Invocation: exec -pub proc compute_ger(exit_roots_ptr: MemoryAddress) -> DoubleWord - push.64 swap - # => [exit_roots_ptr, len_bytes] - exec.keccak256::hash_bytes - # => [GER_ROOT[8]] -end - -#! Verifies a Merkle proof for a leaf value against a root. -#! -#! Verifies that the root, computed using the provided Merkle path and the leaf with its index, -#! matches the provided root. -#! -#! 
Inputs: [LEAF_VALUE_LO, LEAF_VALUE_HI, merkle_path_ptr, leaf_idx, expected_root_ptr] -#! Outputs: [verification_flag] -#! -#! Where: -#! - expected_root_ptr is the pointer to the memory where the expected SMT root is stored. -#! - [LEAF_VALUE_LO, LEAF_VALUE_HI] is the leaf for the provided Merkle path. -#! - merkle_path_ptr is the pointer to the memory where the merkle path is stored. This path is -#! represented as 32 Keccak256Digest values (64 words). -#! - leaf_idx is the index of the provided leaf in the SMT. -#! - [ROOT_LO, ROOT_HI] is the calculated root. -#! - verification_flag is the binary flag indicating whether the verification was successful. -pub proc verify_merkle_proof( - leaf_value: DoubleWord, - merkle_path_ptr: MemoryAddress, - leaf_idx: u32, - expected_root_ptr: MemoryAddress -) -> i1 - # calculate the root of the SMT - exec.calculate_root - # => [CALCULATED_ROOT_LO, CALCULATED_ROOT_HI, expected_root_ptr] - - # load the expected root onto the stack - movup.8 exec.utils::mem_load_double_word - # => [EXPECTED_ROOT_LO, EXPECTED_ROOT_HI, CALCULATED_ROOT_LO, CALCULATED_ROOT_HI] - - # assert the roots are equal - swapw.3 exec.word::eq - # => [exp_hi_equal_calc_hi, CALCULATED_ROOT_LO, EXPECTED_ROOT_LO] - - movdn.8 exec.word::eq and - # => [verification_flag] -end - -# HELPER PROCEDURES -# ================================================================================================= - -#! Computes the root of the SMT based on the provided Merkle path, leaf value and leaf index. -#! -#! Inputs: [LEAF_VALUE_LO, LEAF_VALUE_HI, merkle_path_ptr, leaf_idx] -#! Outputs: [ROOT_LO, ROOT_HI] -#! -#! Where: -#! - [LEAF_VALUE_LO, LEAF_VALUE_HI] is the leaf for the provided Merkle path. -#! - merkle_path_ptr is the pointer to the memory where the merkle path is stored. This path is -#! represented as 32 Keccak256Digest values (64 words). -#! - leaf_idx is the index of the provided leaf in the SMT. -#! - [ROOT_LO, ROOT_HI] is the calculated root. 
-@locals(9) # current hash + is_odd flag -proc calculate_root( - leaf_value: DoubleWord, - merkle_path_ptr: MemoryAddress, - leaf_idx: u32 -) -> DoubleWord - # Local memory stores the current hash. It is initialized to the leaf value - loc_storew_be.CUR_HASH_LO_LOCAL dropw loc_storew_be.CUR_HASH_HI_LOCAL dropw - # => [merkle_path_ptr, leaf_idx] - - # prepare the stack for the hash computation cycle - padw padw padw - # => [PAD, PAD, PAD, merkle_path_ptr, leaf_idx] - - # Merkle path is guaranteed to contain 32 nodes - repeat.32 - # load the Merkle path node onto the stack - mem_stream - # => [PATH_NODE_LO, PATH_NODE_HI, PAD, merkle_path_ptr, leaf_idx] - - # determine whether the last `leaf_idx` bit is 1 (is `leaf_idx` odd) - dup.13 u32and.1 - # => [is_odd, PATH_NODE_LO, PATH_NODE_HI, PAD, merkle_path_ptr, leaf_idx] - - # store the is_odd flag to the local memory, so we could use it while all 16 top elements - # are occupied by the nodes - loc_store.8 - # => [PATH_NODE_LO, PATH_NODE_HI, PAD, merkle_path_ptr, leaf_idx] - - # load the hash respective to the current height from the local memory - padw loc_loadw_be.CUR_HASH_HI_LOCAL padw loc_loadw_be.CUR_HASH_LO_LOCAL - # => [CURR_HASH_LO, CURR_HASH_HI, PATH_NODE_LO, PATH_NODE_HI, PAD, merkle_path_ptr, leaf_idx] - - # load the is_odd flag back to the stack - loc_load.8 - # => [is_odd, CURR_HASH_LO, CURR_HASH_HI, PATH_NODE_LO, PATH_NODE_HI, PAD, merkle_path_ptr, leaf_idx] - - # if is_odd flag equals 1 (`leaf_idx` is odd), change the order of the nodes on the stack - if.true - # rearrange the hashes: current position of the hash is odd, so it should be on the - # right - swapdw - # => [PATH_NODE_LO, PATH_NODE_HI, CURR_HASH_LO, CURR_HASH_HI, PAD, merkle_path_ptr, leaf_idx] - end - - # compute the next height hash - exec.keccak256::merge - # => [CURR_HASH_LO', CURR_HASH_HI', PAD, merkle_path_ptr, leaf_idx] - - # store the resulting hash to the local memory - loc_storew_be.CUR_HASH_LO_LOCAL swapw 
loc_storew_be.CUR_HASH_HI_LOCAL - # => [CURR_HASH_HI', CURR_HASH_LO', PAD, merkle_path_ptr, leaf_idx] - - # update the `leaf_idx` (shift it right by 1 bit) - movup.13 u32shr.1 movdn.13 - # => [CURR_HASH_HI', CURR_HASH_LO', PAD, merkle_path_ptr, leaf_idx>>1] - end - - # after all 32 hashes have been computed, the current hash stored in local memory represents - # the root of the SMT, which should be returned - # - # remove 6 elements from the stack so that exactly 8 are remaining and rewrite them with the - # root value from the local memory - dropw drop drop - loc_loadw_be.CUR_HASH_HI_LOCAL swapw loc_loadw_be.CUR_HASH_LO_LOCAL - # => [ROOT_LO, ROOT_HI] -end diff --git a/crates/miden-agglayer/asm/bridge/eth_address.masm b/crates/miden-agglayer/asm/bridge/eth_address.masm deleted file mode 100644 index 57a8e9f298..0000000000 --- a/crates/miden-agglayer/asm/bridge/eth_address.masm +++ /dev/null @@ -1,88 +0,0 @@ -use miden::core::crypto::hashes::keccak256 -use miden::core::word - -# CONSTANTS -# ================================================================================================= - -const U32_MAX=4294967295 -const TWO_POW_32=4294967296 - -const ERR_NOT_U32="address limb is not u32" -const ERR_ADDR4_NONZERO="most-significant 4 bytes (addr4) must be zero" -const ERR_FELT_OUT_OF_FIELD="combined u64 doesn't fit in field" - - -# ETHEREUM ADDRESS PROCEDURES -# ================================================================================================= - -#! Builds a single felt from two u32 limbs (little-endian limb order). -#! Conceptually, this is packing a 64-bit word (lo + (hi << 32)) into a field element. -#! This proc additionally verifies that the packed value did *not* reduce mod p by round-tripping -#! through u32split and comparing the limbs. -#! -#! Inputs: [lo, hi] -#! 
Outputs: [felt] -proc build_felt - # --- validate u32 limbs --- - u32assert2.err=ERR_NOT_U32 - # => [lo, hi] - - # keep copies for the overflow check - dup.1 dup.1 - # => [lo, hi, lo, hi] - - # felt = (hi * 2^32) + lo - swap - push.TWO_POW_32 mul - add - # => [felt, lo, hi] - - # ensure no reduction mod p happened: - # split felt back into (hi, lo) and compare to inputs - dup u32split - # => [hi2, lo2, felt, lo, hi] - - movup.4 assert_eq.err=ERR_FELT_OUT_OF_FIELD - # => [lo2, felt, lo] - - movup.2 assert_eq.err=ERR_FELT_OUT_OF_FIELD - # => [felt] -end - -#! Converts an Ethereum address format (address[5] type) back into an AccountId [prefix, suffix] type. -#! -#! The Ethereum address format is represented as 5 u32 limbs (20 bytes total) in *little-endian limb order*: -#! addr0 = bytes[16..19] (least-significant 4 bytes) -#! addr1 = bytes[12..15] -#! addr2 = bytes[ 8..11] -#! addr3 = bytes[ 4.. 7] -#! addr4 = bytes[ 0.. 3] (most-significant 4 bytes) -#! -#! The most-significant 4 bytes must be zero for a valid AccountId conversion (addr4 == 0). -#! The remaining 16 bytes are treated as two 8-byte words (conceptual u64 values): -#! prefix = (addr3 << 32) | addr2 # bytes[4..11] -#! suffix = (addr1 << 32) | addr0 # bytes[12..19] -#! -#! These 8-byte words are represented as field elements by packing two u32 limbs into a felt. -#! The packing is done via build_felt, which validates limbs are u32 and checks the packed value -#! did not reduce mod p (i.e. the word fits in the field). -#! -#! Inputs: [addr0, addr1, addr2, addr3, addr4] -#! Outputs: [prefix, suffix] -#! -#! 
Invocation: exec -pub proc to_account_id - # addr4 must be 0 (most-significant limb) - movup.4 - eq.0 assert.err=ERR_ADDR4_NONZERO - # => [addr0, addr1, addr2, addr3] - - exec.build_felt - # => [suffix, addr2, addr3] - - movdn.2 - # => [addr2, addr3, suffix] - - exec.build_felt - # => [prefix, suffix] -end diff --git a/crates/miden-agglayer/asm/bridge/local_exit_tree.masm b/crates/miden-agglayer/asm/bridge/local_exit_tree.masm deleted file mode 100644 index 89e744507b..0000000000 --- a/crates/miden-agglayer/asm/bridge/local_exit_tree.masm +++ /dev/null @@ -1,120 +0,0 @@ -use miden::protocol::active_account -use miden::protocol::native_account - -# CONSTANTS -# ================================================================================================= - -const MMR_PTR=42 -const LOCAL_EXIT_TREE_SLOT=word("miden::agglayer::let") - -#! Adds a leaf to the MMR frontier using Keccak hashing (stubbed implementation). -#! -#! This is a stubbed implementation that currently drops all inputs without performing -#! the actual MMR frontier addition operation. -#! -#! Inputs: [LEAF[1], LEAF[0], mmr_ptr] -#! Outputs: [] -#! -#! Where: -#! - LEAF[1], LEAF[0] are the leaf data to add to the MMR frontier. -#! - mmr_ptr is the pointer to the MMR frontier data structure. -#! -#! Invocation: exec -proc mmr_frontier_keccak_add - dropw dropw drop - # => [] -end - -#! Gets the root of the MMR frontier using Keccak hashing (stubbed implementation). -#! -#! This is a stubbed implementation that returns placeholder values instead of -#! computing the actual MMR frontier root. -#! -#! Inputs: [mmr_ptr] -#! Outputs: [ROOT[1], ROOT[0]] -#! -#! Where: -#! - ROOT[1], ROOT[0] are the root hash components of the MMR frontier whose memory location starts at mmr_ptr -#! -#! 
Invocation: exec -pub proc mmr_frontier_keccak_get_root - # stubbed out for now - drop - # => [] - - push.0.0.0.1 push.LOCAL_EXIT_TREE_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY] - - exec.active_account::get_map_item - # => [ROOT[0]] - - push.0.0.0.0 push.LOCAL_EXIT_TREE_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, ROOT[0]] - - exec.active_account::get_map_item - # => [ROOT[1], ROOT[0]] -end - -#! Writes the MMR frontier root to account storage. -#! -#! This procedure retrieves the current MMR frontier root and stores it as a double word -#! in the account's storage map. The root is split across two storage keys: -#! - Key [0,0,0,0] stores ROOT[1] (high part) -#! - Key [0,0,0,1] stores ROOT[0] (low part) -#! -#! Inputs: [] -#! Outputs: [] -#! -#! Invocation: exec -proc write_mmr_frontier_root - push.MMR_PTR - # => [MMR_PTR] - - # getting mmr frontier root - exec.mmr_frontier_keccak_get_root - # => [ROOT[1], ROOT[0]] - - # writing double word root to map keys [0,0,0,0] & [0,0,0,1] - push.0.0.0.0 push.LOCAL_EXIT_TREE_SLOT[0..2] - # => [index, KEY, ROOT[1], ROOT[0]] - - exec.native_account::set_map_item - # => [OLD_MAP_ROOT, OLD_MAP_VALUE, ROOT[0]] - - dropw dropw - # => [ROOT[0]] - - push.1.0.0.0 push.LOCAL_EXIT_TREE_SLOT[0..2] - # => [index, KEY, ROOT[0]] - - exec.native_account::set_map_item - # => [OLD_MAP_ROOT, OLD_MAP_VALUE] - - dropw dropw - # => [] -end - -#! Adds an asset message to the MMR frontier and updates the stored root. -#! -#! This procedure takes a Keccak digest (represented as 8 u32 values) and adds it -#! as a leaf to the MMR frontier. After adding the leaf, it updates the MMR root -#! in the account's storage to reflect the new state. -#! -#! Inputs: [DIGEST_U32[8]] -#! Outputs: [] -#! -#! Where: -#! - DIGEST_U32[8] is a Keccak256 hash represented as 8 u32 values (256 bits total). -#! -#! 
Invocation: exec -pub proc add_asset_message - push.MMR_PTR movdn.8 - # => [LEAF[1], LEAF[0], mmr_ptr] - - exec.mmr_frontier_keccak_add - # => [] - - exec.write_mmr_frontier_root - # => [] -end - diff --git a/crates/miden-agglayer/asm/bridge/utils.masm b/crates/miden-agglayer/asm/bridge/utils.masm deleted file mode 100644 index 598a392509..0000000000 --- a/crates/miden-agglayer/asm/bridge/utils.masm +++ /dev/null @@ -1,38 +0,0 @@ -# Utility module containing helper procedures for the double word handling. - -# TYPE ALIASES -# ================================================================================================= - -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } -type DoubleWord = struct { word_lo: BeWord, word_hi: BeWord } -type MemoryAddress = u32 - -# PUBLIC INTERFACE -# ================================================================================================= - -#! Stores two words to the provided global memory address. -#! -#! Inputs: [WORD_1, WORD_2, ptr] -#! Outputs: [WORD_1, WORD_2, ptr] -pub proc mem_store_double_word( - double_word_to_store: DoubleWord, - mem_ptr: MemoryAddress -) -> (DoubleWord, MemoryAddress) - dup.8 mem_storew_be swapw - # => [WORD_2, WORD_1, ptr] - - dup.8 add.4 mem_storew_be swapw - # => [WORD_1, WORD_2, ptr] -end - -#! Loads two words from the provided global memory address. -#! -#! Inputs: [ptr] -#! Outputs: [WORD_1, WORD_2] -pub proc mem_load_double_word(mem_ptr: MemoryAddress) -> DoubleWord - padw dup.4 add.4 mem_loadw_be - # => [WORD_2, ptr] - - padw movup.8 mem_loadw_be - # => [WORD_1, WORD_2] -end diff --git a/crates/miden-agglayer/asm/components/bridge.masm b/crates/miden-agglayer/asm/components/bridge.masm new file mode 100644 index 0000000000..15f10fd4b6 --- /dev/null +++ b/crates/miden-agglayer/asm/components/bridge.masm @@ -0,0 +1,9 @@ +# The MASM code of the AggLayer Bridge Account Component. 
+# +# This is a thin wrapper that re-exports bridge-related procedures from the +# agglayer library. + +pub use ::agglayer::bridge::bridge_config::register_faucet +pub use ::agglayer::bridge::bridge_config::update_ger +pub use ::agglayer::bridge::bridge_in::claim +pub use ::agglayer::bridge::bridge_out::bridge_out diff --git a/crates/miden-agglayer/asm/components/faucet.masm b/crates/miden-agglayer/asm/components/faucet.masm new file mode 100644 index 0000000000..ffa33f399e --- /dev/null +++ b/crates/miden-agglayer/asm/components/faucet.masm @@ -0,0 +1,15 @@ +# The MASM code of the AggLayer Faucet Account Component. +# +# This is a thin wrapper that re-exports faucet-related procedures from the +# agglayer library. The faucet exposes: +# - `mint_and_send` from the network fungible faucet (for MINT note consumption, with owner verification) +# - `asset_to_origin_asset` for bridge-out FPI +# - `get_metadata_hash` for bridge-out FPI (metadata hash retrieval) +# - `get_scale` for bridge-in FPI (amount verification) +# - `burn` for bridge-out + +pub use ::agglayer::faucet::mint_and_send +pub use ::agglayer::faucet::asset_to_origin_asset +pub use ::agglayer::faucet::get_metadata_hash +pub use ::agglayer::faucet::get_scale +pub use ::agglayer::faucet::burn diff --git a/crates/miden-agglayer/asm/note_scripts/B2AGG.masm b/crates/miden-agglayer/asm/note_scripts/B2AGG.masm index 9523160e9c..bc5ee0931b 100644 --- a/crates/miden-agglayer/asm/note_scripts/B2AGG.masm +++ b/crates/miden-agglayer/asm/note_scripts/B2AGG.masm @@ -1,7 +1,9 @@ -use miden::agglayer::bridge_out +use agglayer::bridge::bridge_out use miden::protocol::account_id use miden::protocol::active_account use miden::protocol::active_note +use miden::protocol::asset +use miden::protocol::asset::ASSET_VALUE_MEMORY_OFFSET use miden::protocol::note use miden::standards::attachments::network_account_target use miden::standards::wallets::basic->basic_wallet @@ -9,8 +11,12 @@ use 
miden::standards::wallets::basic->basic_wallet # CONSTANTS # ================================================================================================= +const ASSET_PTR=0 const B2AGG_NOTE_NUM_STORAGE_ITEMS=6 +const STORAGE_START_PTR=8 +const STORAGE_END_PTR=STORAGE_START_PTR + 8 + # ERRORS # ================================================================================================= const ERR_B2AGG_WRONG_NUMBER_OF_ASSETS="B2AGG script requires exactly 1 note asset" @@ -30,14 +36,14 @@ const ERR_B2AGG_TARGET_ACCOUNT_MISMATCH="B2AGG note attachment target account do #! Inputs: [] #! Outputs: [] #! -#! Note storage is assumed to be as follows: -#! - destination_network: u32 value representing the target chain ID -#! - destination_address: split into 5 u32 values representing a 20-byte Ethereum address: -#! - destination_address_0: bytes 0-3 -#! - destination_address_1: bytes 4-7 -#! - destination_address_2: bytes 8-11 -#! - destination_address_3: bytes 12-15 -#! - destination_address_4: bytes 16-19 +#! Note storage layout (6 felts total): +#! - destination_network [0] : 1 felt +#! - destination_address [1..5] : 5 felts +#! +#! Where: +#! - destination_network: Destination network identifier (uint32) +#! - destination_address: 20-byte Ethereum address as 5 u32 felts +#! #! Note attachment is constructed from a NetworkAccountTarget standard: #! - [0, exec_hint_tag, target_id_prefix, target_id_suffix] #! 
@@ -71,14 +77,14 @@ begin # => [pad(16)] # Store note storage -> mem[8..14] - push.8 exec.active_note::get_storage + push.STORAGE_START_PTR exec.active_note::get_storage # => [num_storage_items, dest_ptr, pad(16)] push.B2AGG_NOTE_NUM_STORAGE_ITEMS assert_eq.err=ERR_B2AGG_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS drop # => [pad(16)] - # Store note assets -> mem[0..4] - push.0 exec.active_note::get_assets + # Store note assets -> mem[0..8] + push.ASSET_PTR exec.active_note::get_assets # => [num_assets, ptr, pad(16)] # Must be exactly 1 asset @@ -86,14 +92,17 @@ begin # => [pad(16)] # load the 6 B2AGG felts from note storage as two words - mem_loadw_be.12 swapw.2 mem_loadw_be.8 swapw - # => [EMPTY_WORD, dest_network, dest_address(5), pad(6)] + push.STORAGE_START_PTR add.4 mem_loadw_le swapw mem_loadw_le.STORAGE_START_PTR + # => [dest_network, dest_address(5), pad(10)] - # Load ASSET onto the stack - mem_loadw_be.0 - # => [ASSET, dest_network, dest_address(5), pad(6)] + # Load asset onto the stack from ASSET_PTR + push.ASSET_PTR exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, dest_network, dest_address(5), pad(10)] call.bridge_out::bridge_out + # => [pad(24)] + + dropw dropw # => [pad(16)] end # => [pad(16)] diff --git a/crates/miden-agglayer/asm/note_scripts/CLAIM.masm b/crates/miden-agglayer/asm/note_scripts/CLAIM.masm index e213a9f1ff..036b34298e 100644 --- a/crates/miden-agglayer/asm/note_scripts/CLAIM.masm +++ b/crates/miden-agglayer/asm/note_scripts/CLAIM.masm @@ -1,11 +1,10 @@ -use miden::agglayer::agglayer_faucet -> agg_faucet -use miden::protocol::account_id -use miden::protocol::active_account +use agglayer::bridge::bridge_in -> bridge use miden::protocol::active_note use miden::protocol::note use miden::core::crypto::hashes::keccak256 -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 use miden::core::mem +use miden::standards::attachments::network_account_target # CONSTANTS # 
================================================================================================= @@ -16,47 +15,20 @@ const OUTPUT_NOTE_SIZE = 8 const PROOF_DATA_START_PTR = 0 const LEAF_DATA_START_PTR = 536 -const OUTPUT_NOTE_DATA_START_PTR = 568 - -const TARGET_FAUCET_PREFIX_MEM_ADDR = 572 -const TARGET_FAUCET_SUFFIX_MEM_ADDR = 573 +const FAUCET_MINT_AMOUNT = 568 # ERRORS # ================================================================================================= -const ERR_CLAIM_TARGET_ACCT_MISMATCH = "CLAIM's target account address and transaction address do not match" +const ERR_CLAIM_TARGET_ACCT_MISMATCH = "CLAIM note attachment target account does not match consuming account" -#! Asserts that the consuming account matches the target agglayer faucet account. -#! -#! This procedure ensures that only the specified agglayer faucet account can consume -#! this CLAIM note. It assumes that the note storage has already been loaded into memory -#! via active_note::get_storage. -#! -#! Inputs: [] -#! Output: [] +#! Reads claim data from memory and inserts it into the advice map under two separate keys. #! -#! Panics if: -#! - The consuming account ID does not match the target faucet account ID stored in memory -proc assert_aggfaucet_is_consumer - # Load target faucet ID (assumes active_note::get_storage has been called) - mem_load.TARGET_FAUCET_SUFFIX_MEM_ADDR mem_load.TARGET_FAUCET_PREFIX_MEM_ADDR - # => [target_faucet_prefix, target_faucet_suffix] - - exec.active_account::get_id - # => [account_id_prefix, account_id_suffix, target_faucet_prefix, target_faucet_suffix] - - # ensure only the specified target faucet can consume this CLAIM note, not any other account - exec.account_id::is_equal assert.err=ERR_CLAIM_TARGET_ACCT_MISMATCH - # => [] -end - -#! Reads claim data from memory and inserts it into the advice map under three separate keys. -#! -#! This procedure organizes the claim note data into three logical groups and inserts them +#! 
This procedure organizes the claim note data into two logical groups and inserts them #! into the advice map under separate keys for easier access. #! #! Inputs: [] -#! Outputs: [PROOF_DATA_KEY, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] +#! Outputs: [PROOF_DATA_KEY, LEAF_DATA_KEY] #! #! Advice map entries created: #! PROOF_DATA_KEY => [ @@ -69,102 +41,77 @@ end #! #! LEAF_DATA_KEY => [ #! leafType[1], // Leaf type (1 felt, uint32) -#! padding[3], // padding (3 felts) #! originNetwork[1], // Origin network identifier (1 felt, uint32) #! originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts) #! destinationNetwork[1], // Destination network identifier (1 felt, uint32) #! destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts) #! amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts) #! metadata[8], // ABI encoded metadata (8 felts, fixed size) -#! ] -#! -#! TODO: Will be removed in future PR -#! OUTPUT_NOTE_DATA_KEY => [ -#! output_p2id_serial_num[4], // P2ID note serial number (4 felts, Word) -#! target_faucet_account_id[2], // Target faucet account ID (2 felts, prefix and suffix) -#! output_note_tag[1], // P2ID output note tag -#! padding[1], // padding (1 felt) +#! padding[3], // padding (3 felts) #! ] #! #! 
Invocation: exec proc write_claim_data_into_advice_map_by_key - # 1) Get OUTPUT_NOTE_DATA_KEY - push.OUTPUT_NOTE_SIZE push.OUTPUT_NOTE_DATA_START_PTR - exec.rpo256::hash_elements - # => [OUTPUT_NOTE_DATA_KEY] - - push.OUTPUT_NOTE_SIZE add.OUTPUT_NOTE_DATA_START_PTR push.OUTPUT_NOTE_DATA_START_PTR - movdn.5 movdn.5 - # => [OUTPUT_NOTE_DATA_KEY, start_ptr, end_ptr] - - adv.insert_mem - # OS => [OUTPUT_NOTE_DATA_KEY, start_ptr, end_ptr, pad(16)] - # AM => {OUTPUT_NOTE_DATA_KEY: mem[start_ptr..end_ptr] } - - movup.4 drop movup.4 drop - # => [OUTPUT_NOTE_DATA_KEY] - - # 2) Get LEAF_DATA_KEY + # 1) Get LEAF_DATA_KEY push.LEAF_DATA_SIZE push.LEAF_DATA_START_PTR - exec.rpo256::hash_elements - # => [LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] + exec.poseidon2::hash_elements + # => [LEAF_DATA_KEY] push.LEAF_DATA_SIZE add.LEAF_DATA_START_PTR push.LEAF_DATA_START_PTR movdn.5 movdn.5 - # => [LEAF_DATA_KEY, start_ptr, end_ptr, OUTPUT_NOTE_DATA_KEY] + # => [LEAF_DATA_KEY, start_ptr, end_ptr] adv.insert_mem # OS => [LEAF_DATA_KEY, start_ptr, end_ptr] # AM => {LEAF_DATA_KEY: mem[start_ptr..end_ptr] } movup.4 drop movup.4 drop - # => [LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] + # => [LEAF_DATA_KEY] - # 3) Get PROOF_DATA_KEY + # 2) Get PROOF_DATA_KEY push.PROOF_DATA_SIZE push.PROOF_DATA_START_PTR - exec.rpo256::hash_elements - # => [PROOF_DATA_KEY, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] + exec.poseidon2::hash_elements + # => [PROOF_DATA_KEY, LEAF_DATA_KEY] push.PROOF_DATA_SIZE push.PROOF_DATA_START_PTR movdn.5 movdn.5 - # => [PROOF_DATA_KEY, start_ptr, end_ptr, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] + # => [PROOF_DATA_KEY, start_ptr, end_ptr, LEAF_DATA_KEY] adv.insert_mem - # OS => [PROOF_DATA_KEY, start_ptr, end_ptr, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] + # OS => [PROOF_DATA_KEY, start_ptr, end_ptr, LEAF_DATA_KEY] # AM => {PROOF_DATA_KEY: mem[start_ptr..end_ptr] } movup.4 drop movup.4 drop - # => [PROOF_DATA_KEY, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] + # => [PROOF_DATA_KEY, LEAF_DATA_KEY] end -#! 
Agglayer Faucet CLAIM script: claims assets by calling the agglayer faucet's claim function. +#! Agglayer Bridge CLAIM script: claims assets by calling the bridge's claim function. #! -#! This note can only be consumed by the specific agglayer faucet account whose ID is provided -#! in the note storage (target_faucet_account_id). Upon consumption, it will create a P2ID note. +#! This note is consumed by the agglayer bridge account whose ID is provided +#! in the note attachment (NetworkAccountTarget). Upon consumption, the bridge validates +#! the Merkle proof, looks up the faucet from the token registry, and creates a MINT note +#! targeting the aggfaucet. #! #! Requires that the account exposes: -#! - agglayer::agglayer_faucet::claim procedure. +#! - agglayer::bridge::bridge_in::claim procedure. #! #! Inputs: [ARGS, pad(12)] #! Outputs: [pad(16)] #! -#! NoteStorage layout (576 felts total): +#! NoteStorage layout (569 felts total): #! - smtProofLocalExitRoot [0..255] : 256 felts #! - smtProofRollupExitRoot [256..511]: 256 felts #! - globalIndex [512..519]: 8 felts #! - mainnetExitRoot [520..527]: 8 felts #! - rollupExitRoot [528..535]: 8 felts #! - leafType [536] : 1 felt -#! - padding [537..539]: 3 felts -#! - originNetwork [540] : 1 felt -#! - originTokenAddress [541..545]: 5 felts -#! - destinationNetwork [546] : 1 felt -#! - destinationAddress [547..551]: 5 felts -#! - amount [552..559]: 8 felts -#! - metadata [560..567]: 8 felts -#! - output_p2id_serial_num [568..571]: 4 felts -#! - target_faucet_account_id [572..573]: 2 felts -#! - output_note_tag [574] : 1 felt -#! - padding [575] : 1 felt +#! - originNetwork [537] : 1 felt +#! - originTokenAddress [538..542]: 5 felts +#! - destinationNetwork [543] : 1 felt +#! - destinationAddress [544..548]: 5 felts +#! - amount [549..556]: 8 felts +#! - metadata [557..564]: 8 felts +#! - padding [565..567]: 3 felts +#! - miden_claim_amount [568] : 1 felt #! #! Where: #! 
- smtProofLocalExitRoot: SMT proof for local exit root (bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) @@ -180,38 +127,43 @@ end #! - originNetwork: Origin network identifier (uint32) #! - originTokenAddress: Origin token address (address as 5 u32 felts) #! - destinationNetwork: Destination network identifier (uint32) -#! - destinationAddress: Destination address (address as 5 u32 felts) +#! - destinationAddress: 20-byte Ethereum address decodable into a Miden AccountId (5 u32 felts) #! - amount: Amount of tokens (uint256 as 8 u32 felts) #! - metadata: ABI encoded metadata (fixed size) -#! - padding (3 felts) -#! - output_p2id_serial_num: P2ID note serial number (Word) -#! - target_faucet_account_id: Target agglayer faucet account ID (prefix and suffix). Only this specific -#! account can consume the note - any other account will cause a panic. -#! - output_note_tag: P2ID output note tag -#! - padding (1 felt) +#! - miden_claim_amount: Scaled-down Miden token amount (Felt). This is the Y value computed from +#! scaling down the Ethereum amount (X) by the scale exponent: Y = floor(X / 10^scale_exp) #! #! Panics if: #! - account does not expose claim procedure. -#! - target faucet account ID does not match the consuming account ID. +#! - note attachment target account does not match the consuming account. begin dropw # => [pad(16)] - # Load CLAIM note storage into memory, starting at address 0 - push.0 exec.active_note::get_storage drop drop + # Ensure note attachment targets the consuming bridge account. 
+ exec.network_account_target::active_account_matches_target_account + assert.err=ERR_CLAIM_TARGET_ACCT_MISMATCH # => [pad(16)] - # Check consuming account == aggfaucet - exec.assert_aggfaucet_is_consumer + # Load CLAIM note storage into memory, starting at address 0 + push.0 exec.active_note::get_storage drop drop # => [pad(16)] exec.write_claim_data_into_advice_map_by_key - # => [PROOF_DATA_KEY, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY, pad(4)] + # => [PROOF_DATA_KEY, LEAF_DATA_KEY, pad(16)] + + mem_load.FAUCET_MINT_AMOUNT + # => [faucet_mint_amount, PROOF_DATA_KEY, LEAF_DATA_KEY, pad(16)] + + movdn.8 + # => [PROOF_DATA_KEY, LEAF_DATA_KEY, faucet_mint_amount, pad(16)] + + # call the Bridge Claim procedure + call.bridge::claim + # => [pad(16), pad(9)] - # Call the Aggfaucet Claim procedure - call.agg_faucet::claim - # => [pad(16), pad(12)] + # a call invocation consumes and returns 16 elements, but we had trailing padding + dropw dropw drop - dropw dropw dropw # => [pad(16)] end diff --git a/crates/miden-agglayer/asm/note_scripts/CONFIG_AGG_BRIDGE.masm b/crates/miden-agglayer/asm/note_scripts/CONFIG_AGG_BRIDGE.masm new file mode 100644 index 0000000000..12259b36f7 --- /dev/null +++ b/crates/miden-agglayer/asm/note_scripts/CONFIG_AGG_BRIDGE.masm @@ -0,0 +1,86 @@ +use agglayer::bridge::bridge_config +use miden::protocol::active_note +use miden::protocol::active_account +use miden::protocol::account_id +use miden::standards::attachments::network_account_target + +# CONSTANTS +# ================================================================================================= + +const STORAGE_START_PTR = 0 +const CONFIG_AGG_BRIDGE_NUM_STORAGE_ITEMS = 7 + +const FAUCET_ID_SUFFIX = 5 +const FAUCET_ID_PREFIX = 6 +const ORIGIN_TOKEN_ADDR_4 = 4 + +# ERRORS +# ================================================================================================= + +const ERR_CONFIG_AGG_BRIDGE_UNEXPECTED_STORAGE_ITEMS = "CONFIG_AGG_BRIDGE expects exactly 7 note storage items" +const 
ERR_CONFIG_AGG_BRIDGE_TARGET_ACCOUNT_MISMATCH = "CONFIG_AGG_BRIDGE note attachment target account does not match consuming account" + +#! Registers a faucet in the bridge's faucet registry and token registry. +#! +#! This note can only be consumed by the Agglayer Bridge account that is targeted by the note +#! attachment, and only if the note was sent by the bridge admin. +#! Upon consumption, it registers the faucet ID and origin token address mapping in the bridge. +#! +#! Requires that the account exposes: +#! - agglayer::bridge_config::register_faucet procedure. +#! +#! Inputs: [ARGS, pad(12)] +#! Outputs: [pad(16)] +#! +#! NoteStorage layout (7 felts total): +#! - origin_token_addr_0 [0]: 1 felt +#! - origin_token_addr_1 [1]: 1 felt +#! - origin_token_addr_2 [2]: 1 felt +#! - origin_token_addr_3 [3]: 1 felt +#! - origin_token_addr_4 [4]: 1 felt +#! - faucet_id_suffix [5]: 1 felt +#! - faucet_id_prefix [6]: 1 felt + +#! +#! Where: +#! - faucet_id_suffix: Suffix felt of the faucet account ID to register. +#! - faucet_id_prefix: Prefix felt of the faucet account ID to register. +#! +#! Panics if: +#! - The note attachment target account does not match the consuming bridge account. +#! - The note does not contain exactly 7 storage items. +#! - The account does not expose the register_faucet procedure. +begin + dropw + # => [pad(16)] + + # Ensure note attachment targets the consuming bridge account. 
+ exec.network_account_target::active_account_matches_target_account + assert.err=ERR_CONFIG_AGG_BRIDGE_TARGET_ACCOUNT_MISMATCH + # => [pad(16)] + + # Load note storage to memory + push.STORAGE_START_PTR exec.active_note::get_storage + # => [num_storage_items, dest_ptr, pad(16)] + + push.CONFIG_AGG_BRIDGE_NUM_STORAGE_ITEMS assert_eq.err=ERR_CONFIG_AGG_BRIDGE_UNEXPECTED_STORAGE_ITEMS drop + # => [pad(16)] + + # Load origin_token_addr(5) and faucet_id from memory + # register_faucet expects: [origin_token_addr(5), faucet_id_suffix, faucet_id_prefix, pad(9)] + + # Load all 7 values individually in the correct order + mem_load.FAUCET_ID_PREFIX mem_load.FAUCET_ID_SUFFIX mem_load.ORIGIN_TOKEN_ADDR_4 + # => [addr4, faucet_id_suffix, faucet_id_prefix, pad(16)] + + padw mem_loadw_le.STORAGE_START_PTR + # => [addr4, addr3, addr2, addr1, addr0, faucet_id_suffix, faucet_id_prefix, pad(16)] + + # Register the faucet in the bridge + # => [addr4, addr3, addr2, addr1, addr0, faucet_id_suffix, faucet_id_prefix, pad(9), pad(7)] + call.bridge_config::register_faucet + # => [pad(16), pad(7)] + + dropw drop drop drop + # => [pad(16)] +end diff --git a/crates/miden-agglayer/asm/note_scripts/UPDATE_GER.masm b/crates/miden-agglayer/asm/note_scripts/UPDATE_GER.masm index 1ca3d1ab9d..c11d609db0 100644 --- a/crates/miden-agglayer/asm/note_scripts/UPDATE_GER.masm +++ b/crates/miden-agglayer/asm/note_scripts/UPDATE_GER.masm @@ -1,4 +1,4 @@ -use miden::agglayer::bridge_in +use agglayer::bridge::bridge_config use miden::protocol::active_note use miden::protocol::active_account use miden::protocol::account_id @@ -19,13 +19,14 @@ const ERR_UPDATE_GER_TARGET_ACCOUNT_MISMATCH = "UPDATE_GER note attachment targe # NOTE SCRIPT # ================================================================================================= -#! Agglayer Bridge UPDATE_GER script: updates the GER by calling the bridge_in::update_ger function. +#! 
Agglayer Bridge UPDATE_GER script: updates the GER by calling the bridge_config::update_ger function. #! #! This note can only be consumed by the specific agglayer bridge account whose ID is provided -#! in the note attachment (target_account_id). +#! in the note attachment (target_account_id), and only if the note was sent by the +#! global exit root manager. #! #! Requires that the account exposes: -#! - agglayer::bridge_in::update_ger procedure. +#! - agglayer::bridge_config::update_ger procedure. #! #! Inputs: [ARGS, pad(12)] #! Outputs: [pad(16)] @@ -55,13 +56,13 @@ begin # => [pad(16)] # Load GER_LOWER and GER_UPPER from note storage - mem_loadw_be.STORAGE_PTR_GER_UPPER + mem_loadw_le.STORAGE_PTR_GER_UPPER # => [GER_UPPER[4], pad(12)] swapw - mem_loadw_be.STORAGE_PTR_GER_LOWER + mem_loadw_le.STORAGE_PTR_GER_LOWER # => [GER_LOWER[4], GER_UPPER[4], pad(8)] - call.bridge_in::update_ger + call.bridge_config::update_ger # => [pad(16)] end diff --git a/crates/miden-agglayer/build.rs b/crates/miden-agglayer/build.rs index d91a2ac1d7..15fc9efaad 100644 --- a/crates/miden-agglayer/build.rs +++ b/crates/miden-agglayer/build.rs @@ -1,12 +1,21 @@ use std::env; +use std::fmt::Write; use std::path::Path; use fs_err as fs; -use miden_assembly::diagnostics::{IntoDiagnostic, Result, WrapErr}; -use miden_assembly::utils::Serializable; +use miden_assembly::diagnostics::{IntoDiagnostic, NamedSource, Result, WrapErr}; +use miden_assembly::serde::Serializable; use miden_assembly::{Assembler, Library, Report}; use miden_crypto::hash::keccak::{Keccak256, Keccak256Digest}; +use miden_protocol::account::{ + AccountCode, + AccountComponent, + AccountComponentMetadata, + AccountType, +}; use miden_protocol::transaction::TransactionKernel; +use miden_standards::account::auth::NoAuth; +use miden_standards::account::mint_policies::OwnerControlled; // CONSTANTS // ================================================================================================ @@ -19,17 +28,21 @@ const 
BUILD_GENERATED_FILES_IN_SRC: bool = option_env!("BUILD_GENERATED_FILES_IN const ASSETS_DIR: &str = "assets"; const ASM_DIR: &str = "asm"; const ASM_NOTE_SCRIPTS_DIR: &str = "note_scripts"; -const ASM_BRIDGE_DIR: &str = "bridge"; +const ASM_AGGLAYER_DIR: &str = "agglayer"; +const ASM_AGGLAYER_BRIDGE_DIR: &str = "agglayer/bridge"; +const ASM_COMPONENTS_DIR: &str = "components"; const AGGLAYER_ERRORS_FILE: &str = "src/errors/agglayer.rs"; const AGGLAYER_ERRORS_ARRAY_NAME: &str = "AGGLAYER_ERRORS"; +const AGGLAYER_GLOBAL_CONSTANTS_FILE_NAME: &str = "agglayer_constants.rs"; // PRE-PROCESSING // ================================================================================================ /// Read and parse the contents from `./asm`. +/// - Compiles the contents of asm/agglayer directory into a single agglayer.masl library. +/// - Compiles the contents of asm/components directory into individual per-component .masl files. /// - Compiles the contents of asm/note_scripts directory into individual .masb files. -/// - Compiles the contents of asm/account_components directory into individual .masl files. 
fn main() -> Result<()> { // re-build when the MASM code changes println!("cargo::rerun-if-changed={ASM_DIR}/"); @@ -40,8 +53,8 @@ fn main() -> Result<()> { let build_dir = env::var("OUT_DIR").unwrap(); let src = Path::new(&crate_dir).join(ASM_DIR); - // generate canonical zeros in `asm/bridge/canonical_zeros.masm` - generate_canonical_zeros(&src.join(ASM_BRIDGE_DIR))?; + // generate canonical zeros in `asm/agglayer/bridge/canonical_zeros.masm` + generate_canonical_zeros(&src.join(ASM_AGGLAYER_BRIDGE_DIR))?; let dst = Path::new(&build_dir).to_path_buf(); shared::copy_directory(src, &dst, ASM_DIR)?; @@ -59,6 +72,13 @@ fn main() -> Result<()> { let mut assembler = TransactionKernel::assembler(); assembler.link_static_library(agglayer_lib)?; + // compile account components (thin wrappers per component) and return their libraries + let component_libraries = compile_account_components( + &source_dir.join(ASM_COMPONENTS_DIR), + &target_dir.join(ASM_COMPONENTS_DIR), + assembler.clone(), + )?; + // compile note scripts compile_note_scripts( &source_dir.join(ASM_NOTE_SCRIPTS_DIR), @@ -66,6 +86,10 @@ fn main() -> Result<()> { assembler.clone(), )?; + // generate agglayer specific constants + let constants_out_path = Path::new(&build_dir).join(AGGLAYER_GLOBAL_CONSTANTS_FILE_NAME); + generate_agglayer_constants(constants_out_path, component_libraries)?; + generate_error_constants(&source_dir)?; Ok(()) @@ -74,7 +98,7 @@ fn main() -> Result<()> { // COMPILE AGGLAYER LIB // ================================================================================================ -/// Reads the MASM files from "{source_dir}/bridge" directory, compiles them into a Miden +/// Reads the MASM files from "{source_dir}/agglayer" directory, compiles them into a Miden /// assembly library, saves the library into "{target_dir}/agglayer.masl", and returns the compiled /// library. 
fn compile_agglayer_lib( @@ -82,13 +106,13 @@ fn compile_agglayer_lib( target_dir: &Path, mut assembler: Assembler, ) -> Result { - let source_dir = source_dir.join(ASM_BRIDGE_DIR); + let source_dir = source_dir.join(ASM_AGGLAYER_DIR); // Add the miden-standards library to the assembler so agglayer components can use it let standards_lib = miden_standards::StandardsLib::default(); assembler.link_static_library(standards_lib)?; - let agglayer_lib = assembler.assemble_library_from_dir(source_dir, "miden::agglayer")?; + let agglayer_lib = assembler.assemble_library_from_dir(source_dir, "agglayer")?; let output_file = target_dir.join("agglayer").with_extension(Library::LIBRARY_EXTENSION); agglayer_lib.write_to_file(output_file).into_diagnostic()?; @@ -136,38 +160,31 @@ fn compile_note_scripts( Ok(()) } -// COMPILE ACCOUNT COMPONENTS (DEPRECATED) +// COMPILE ACCOUNT COMPONENTS // ================================================================================================ -/// Compiles the agglayer library in `source_dir` into MASL libraries and stores the compiled -/// files in `target_dir`. +/// Compiles the account components in `source_dir` into MASL libraries, stores the compiled +/// files in `target_dir`, and returns a vector of compiled component libraries along with their +/// names. +/// +/// Each `.masm` file in the components directory is a thin wrapper that re-exports specific +/// procedures from the main agglayer library. This ensures each component (bridge, faucet) +/// only exposes the procedures relevant to its role. /// -/// NOTE: This function is deprecated and replaced by compile_agglayer_lib -fn _compile_bridge_components( +/// The assembler must already have the agglayer library linked so that `pub use` re-exports +/// can resolve. 
+fn compile_account_components( source_dir: &Path, target_dir: &Path, - mut assembler: Assembler, -) -> Result { + assembler: Assembler, +) -> Result> { if !target_dir.exists() { fs::create_dir_all(target_dir).unwrap(); } - // Add the miden-standards library to the assembler so agglayer components can use it - let standards_lib = miden_standards::StandardsLib::default(); - assembler.link_static_library(standards_lib)?; + let mut component_libraries = Vec::new(); - // Compile all components together as a single library under the "miden::agglayer" namespace - // This allows cross-references between components (e.g., bridge_out using - // miden::agglayer::local_exit_tree) - let agglayer_library = assembler.assemble_library_from_dir(source_dir, "miden::agglayer")?; - - // Write the combined library - let library_path = target_dir.join("agglayer").with_extension(Library::LIBRARY_EXTENSION); - agglayer_library.write_to_file(library_path).into_diagnostic()?; - - // Also write individual component files for reference - let masm_files = shared::get_masm_files(source_dir).unwrap(); - for masm_file_path in &masm_files { + for masm_file_path in shared::get_masm_files(source_dir).unwrap() { let component_name = masm_file_path .file_stem() .expect("masm file should have a file stem") @@ -175,14 +192,107 @@ fn _compile_bridge_components( .expect("file stem should be valid UTF-8") .to_owned(); - let component_source_code = fs::read_to_string(masm_file_path) + let component_source_code = fs::read_to_string(&masm_file_path) .expect("reading the component's MASM source code should succeed"); - let individual_file_path = target_dir.join(&component_name).with_extension("masm"); - fs::write(individual_file_path, component_source_code).into_diagnostic()?; + let named_source = NamedSource::new(component_name.clone(), component_source_code); + + let component_library = assembler + .clone() + .assemble_library([named_source]) + .expect("library assembly should succeed"); + + let 
component_file_path = + target_dir.join(&component_name).with_extension(Library::LIBRARY_EXTENSION); + component_library.write_to_file(&component_file_path).into_diagnostic()?; + + component_libraries.push((component_name, component_library)); + } + + Ok(component_libraries) +} + +// GENERATE AGGLAYER CONSTANTS +// ================================================================================================ + +/// Generates a Rust file containing AggLayer specific constants. +/// +/// At the moment, this file contains the following constants: +/// - AggLayer Bridge code commitment. +/// - AggLayer Faucet code commitment. +fn generate_agglayer_constants( + target_file: impl AsRef, + component_libraries: Vec<(String, Library)>, +) -> Result<()> { + let mut file_contents = String::new(); + + writeln!( + file_contents, + "// This file is generated by build.rs, do not modify manually.\n" + ) + .unwrap(); + + writeln!( + file_contents, + "// AGGLAYER CONSTANTS +// ================================================================================================ +" + ) + .unwrap(); + + // Create a dummy metadata to be able to create components. We only interested in the resulting + // code commitment, so it doesn't matter what does this metadata holds. + let dummy_metadata = AccountComponentMetadata::new("dummy", AccountType::all()); + + // iterate over the AggLayer Bridge and AggLayer Faucet libraries + for (lib_name, content_library) in component_libraries { + let agglayer_component = + AccountComponent::new(content_library, vec![], dummy_metadata.clone()).unwrap(); + + // The faucet account includes Ownable2Step and OwnerControlled components + // alongside the agglayer faucet component, since network_fungible::mint_and_send + // requires these for access control. 
+ let mut components: Vec = + vec![AccountComponent::from(NoAuth), agglayer_component]; + if lib_name == "faucet" { + // Use a dummy owner for commitment computation - the actual owner is set at runtime + let dummy_owner = miden_protocol::account::AccountId::try_from( + miden_protocol::testing::account_id::ACCOUNT_ID_REGULAR_NETWORK_ACCOUNT_IMMUTABLE_CODE, + ) + .unwrap(); + components.push(AccountComponent::from( + miden_standards::account::access::Ownable2Step::new(dummy_owner), + )); + components.push(AccountComponent::from(OwnerControlled::owner_only())); + } + + // use `AccountCode` to merge codes of agglayer and authentication components + let account_code = AccountCode::from_components(&components, AccountType::FungibleFaucet) + .expect("account code creation failed"); + + let code_commitment = account_code.commitment(); + + writeln!( + file_contents, + "pub const {}_CODE_COMMITMENT: Word = Word::new([ + Felt::new({}), + Felt::new({}), + Felt::new({}), + Felt::new({}), +]);", + lib_name.to_uppercase(), + code_commitment[0], + code_commitment[1], + code_commitment[2], + code_commitment[3], + ) + .unwrap(); } - Ok(agglayer_library) + // write the resulting constants to the target directory + shared::write_if_changed(target_file, file_contents.as_bytes())?; + + Ok(()) } // ERROR CONSTANTS FILE GENERATION @@ -273,7 +383,6 @@ fn generate_canonical_zeros(target_dir: &Path) -> Result<()> { let zero_as_u32_vec = zero .chunks(4) .map(|chunk_u32| u32::from_le_bytes(chunk_u32.try_into().unwrap()).to_string()) - .rev() .collect::>(); zero_constants.push_str(&format!( @@ -287,7 +396,7 @@ fn generate_canonical_zeros(target_dir: &Path) -> Result<()> { // remove once CANONICAL_ZEROS advice map is available zero_constants.push_str( " -use ::miden::agglayer::mmr_frontier32_keccak::mem_store_double_word +use ::agglayer::common::utils::mem_store_double_word #! Inputs: [zeros_ptr] #! 
Outputs: [] @@ -295,7 +404,7 @@ pub proc load_zeros_to_memory\n", ); for zero_index in 0..32 { - zero_constants.push_str(&format!("\tpush.ZERO_{zero_index}_L.ZERO_{zero_index}_R exec.mem_store_double_word dropw dropw add.8\n")); + zero_constants.push_str(&format!("\tpush.ZERO_{zero_index}_R.ZERO_{zero_index}_L exec.mem_store_double_word dropw dropw add.8\n")); } zero_constants.push_str("\tdrop\nend\n"); diff --git a/crates/miden-agglayer/solidity-compat/README.md b/crates/miden-agglayer/solidity-compat/README.md index f93f83b5bc..b45b6edced 100644 --- a/crates/miden-agglayer/solidity-compat/README.md +++ b/crates/miden-agglayer/solidity-compat/README.md @@ -46,5 +46,12 @@ The canonical zeros should match the constants in: ### MMR Frontier Vectors -The `test_generateVectors` adds leaves `0, 1, 2, ...` (as left-padded 32-byte values) -and outputs the root after each addition. +The `test_generateVectors` adds 32 leaves and outputs the root after each addition. +Each leaf uses: + +- `amounts[i] = i + 1` +- `destination_networks[i]` and `destination_addresses[i]` generated deterministically from + a fixed seed in `MMRTestVectors.t.sol` + +This gives reproducible "random-looking" destination parameters while keeping vector generation +stable across machines and reruns. 
diff --git a/crates/miden-agglayer/solidity-compat/foundry.lock b/crates/miden-agglayer/solidity-compat/foundry.lock index 8aa165ad75..f8ac0886bd 100644 --- a/crates/miden-agglayer/solidity-compat/foundry.lock +++ b/crates/miden-agglayer/solidity-compat/foundry.lock @@ -7,5 +7,23 @@ "name": "v1.14.0", "rev": "1801b0541f4fda118a10798fd3486bb7051c5dd6" } + }, + "lib/openzeppelin-contracts": { + "branch": { + "name": "release-v5.0", + "rev": "dbb6104ce834628e473d2173bbc9d47f81a9eec3" + } + }, + "lib/openzeppelin-contracts-upgradeable": { + "branch": { + "name": "release-v4.9", + "rev": "2d081f24cac1a867f6f73d512f2022e1fa987854" + } + }, + "lib/openzeppelin-contracts-upgradeable5": { + "branch": { + "name": "release-v5.0", + "rev": "723f8cab09cdae1aca9ec9cc1cfa040c2d4b06c1" + } } } \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/foundry.toml b/crates/miden-agglayer/solidity-compat/foundry.toml index c22ad7e3f6..36d2c9934e 100644 --- a/crates/miden-agglayer/solidity-compat/foundry.toml +++ b/crates/miden-agglayer/solidity-compat/foundry.toml @@ -1,10 +1,18 @@ [profile.default] -libs = ["lib"] -out = "out" -solc = "0.8.20" -src = "src" +libs = ["lib"] +optimizer = true +optimizer_runs = 200 +out = "out" +solc = "0.8.28" +src = "src" +via_ir = true -remappings = ["@agglayer/=lib/agglayer-contracts/contracts/"] +remappings = [ + "@agglayer/=lib/agglayer-contracts/contracts/", + "@openzeppelin/contracts-upgradeable4/=lib/openzeppelin-contracts-upgradeable/contracts/", + "@openzeppelin/contracts-upgradeable5/=lib/openzeppelin-contracts-upgradeable5/contracts/", + "@openzeppelin/contracts5/=lib/openzeppelin-contracts/contracts/", +] # Emit extra output for test vector generation ffi = false diff --git a/crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts b/crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts new file mode 160000 index 0000000000..dbb6104ce8 --- /dev/null +++ 
b/crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts @@ -0,0 +1 @@ +Subproject commit dbb6104ce834628e473d2173bbc9d47f81a9eec3 diff --git a/crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable b/crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable new file mode 160000 index 0000000000..2d081f24ca --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable @@ -0,0 +1 @@ +Subproject commit 2d081f24cac1a867f6f73d512f2022e1fa987854 diff --git a/crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable5 b/crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable5 new file mode 160000 index 0000000000..723f8cab09 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable5 @@ -0,0 +1 @@ +Subproject commit 723f8cab09cdae1aca9ec9cc1cfa040c2d4b06c1 diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_local_tx.json b/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_local_tx.json new file mode 100644 index 0000000000..f21309609a --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_local_tx.json @@ -0,0 +1,87 @@ +{ + "amount": "100000000000000000000", + "claimed_global_index_hash_chain": "0xbce0afc98c69ea85e9cfbf98c87c58a77c12d857551f1858530341392f70c22d", + "deposit_count": 1, + "description": "L1 bridgeAsset transaction test vectors with valid Merkle proofs", + "destination_address": "0x00000000AA0000000000bb000000cc000000Dd00", + "destination_network": 20, + "global_exit_root": "0xc84f1e3744c151b345a8899034b3677c0fdbaf45aa3aaf18a3f97dbcf70836cb", + "global_index": "0x0000000000000000000000000000000000000000000000010000000000000000", + "leaf_type": 0, + "leaf_value": "0x9d85d7c56264697df18f458b4b12a457b87b7e7f7a9b16dcb368514729ef680d", + "local_exit_root": 
"0xc9e095ea4cfe19b7e9a6d1aff6c55914ccc8df34954f9f6a2ad8e42d2632a0ab", + "mainnet_exit_root": "0xc9e095ea4cfe19b7e9a6d1aff6c55914ccc8df34954f9f6a2ad8e42d2632a0ab", + "metadata": "0x000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000a5465737420546f6b656e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000045445535400000000000000000000000000000000000000000000000000000000", + "metadata_hash": "0x4d0d9fb7f9ab2f012da088dc1c228173723db7e09147fe4fea2657849d580161", + "origin_network": 0, + "origin_token_address": "0x2DC70fb75b88d2eB4715bc06E1595E6D97c34DFF", + "rollup_exit_root": "0xd18cc25ae65a4e3d95587ffea9411747238567d6e5d3744240554713edefc197", + "smt_proof_local_exit_root": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + 
"0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9" + ], + "smt_proof_rollup_exit_root": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000" + ] +} \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_real_tx.json b/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_real_tx.json new file mode 100644 index 0000000000..e4423417fb --- /dev/null +++ 
b/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_real_tx.json @@ -0,0 +1,83 @@ +{ + "amount": 100000000000000, + "claimed_global_index_hash_chain": "0xd2bb2f0231ee9ea0c88e89049bea6dbcf7dd96a1015ca9e66ab38ef3c8dc928e", + "destination_address": "0x00000000b0E79c68cafC54802726C6F102Cca300", + "destination_network": 20, + "global_exit_root": "0xe1cbfbde30bd598ee9aa2ac913b60d53e3297e51ed138bf86c500dd7d2391e7d", + "global_index": "0x0000000000000000000000000000000000000000000000010000000000039e88", + "leaf_type": 0, + "leaf_value": "0xc58420b9b4ba439bb5f6f68096270f4df656553ec67150d4d087416b9ef6ea9d", + "mainnet_exit_root": "0x31d3268d3a0145d65482b336935fa07dab0822f7dccd865f361d2bf122c4905c", + "metadata_hash": "0x945d61756eddd06a335ceff22d61480fc2086e85e74a55db5485f814626247d5", + "origin_network": 0, + "origin_token_address": "0x2DC70fb75b88d2eB4715bc06E1595E6D97c34DFF", + "rollup_exit_root": "0x8452a95fd710163c5fa8ca2b2fe720d8781f0222bb9e82c2a442ec986c374858", + "smt_proof_local_exit_root": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0xe37d456460231cf80063f57ee83a02f70d810c568b3bfb71156d52445f7a885a", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0x3236bf576fca1adf85917ec7888c4b89cce988564b6028f7d66807763aaa7b04", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0x054ba828046324ff4794fce22adefb23b3ce749cd4df75ade2dc9f41dd327c31", + "0x4e9220076c344bf223c7e7cb2d47c9f0096c48def6a9056e41568de4f01d2716", + "0xca6369acd49a7515892f5936227037cc978a75853409b20f1145f1d44ceb7622", + "0x5a925caf7bfdf31344037ba5b42657130d049f7cb9e87877317e79fce2543a0c", + 
"0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0x4111a1a05cc06ad682bb0f213170d7d57049920d20fc4e0f7556a21b283a7e2a", + "0x77a0f8b0e0b4e5a57f5e381b3892bb41a0bcdbfdf3c7d591fae02081159b594d", + "0x361122b4b1d18ab577f2aeb6632c690713456a66a5670649ceb2c0a31e43ab46", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9" + ], + "smt_proof_rollup_exit_root": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000" + ] +} \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_rollup_tx.json b/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_rollup_tx.json new file mode 100644 index 0000000000..aafa61f593 
--- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_rollup_tx.json @@ -0,0 +1,87 @@ +{ + "amount": "100000000000000000000", + "claimed_global_index_hash_chain": "0x68ace2f015593d5f6de5338c9eca6e748764574491b9f0eed941a2b49db1a7a3", + "deposit_count": 3, + "description": "Rollup deposit test vectors with valid two-level Merkle proofs (non-zero indices)", + "destination_address": "0x00000000AA0000000000bb000000cc000000Dd00", + "destination_network": 20, + "global_exit_root": "0x677d4ecba0ff4871f33163e70ea39a13fe97f2fa9b4dbad110e398830a324159", + "global_index": "0x0000000000000000000000000000000000000000000000000000000500000002", + "leaf_type": 0, + "leaf_value": "0x4a6a047a2b89dd9c557395833c5e34c4f72e6f9aae70779e856f14a6a2827585", + "local_exit_root": "0x985cff7ee35794b30fba700b64546b4ec240d2d78aaf356d56e83d907009367f", + "mainnet_exit_root": "0x4d63440b08ffffe5a049aae4161d54821a09973965a1a1728534a0f117b6d6c9", + "metadata": "0x000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000a5465737420546f6b656e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000045445535400000000000000000000000000000000000000000000000000000000", + "metadata_hash": "0x4d0d9fb7f9ab2f012da088dc1c228173723db7e09147fe4fea2657849d580161", + "origin_network": 3, + "origin_token_address": "0x2DC70fb75b88d2eB4715bc06E1595E6D97c34DFF", + "rollup_exit_root": "0x91105681934ca0791f4e760fb1f702050d81e4b7c866d42f540710999c90ea97", + "smt_proof_local_exit_root": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xa8367b4263332f7e5453faa770f07ef4ce3e74fc411e0a788a98b38b91fd3b3e", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + 
"0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + 
"0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9" + ], + "smt_proof_rollup_exit_root": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + 
"0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9" + ] +} \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/exit_roots.json b/crates/miden-agglayer/solidity-compat/test-vectors/exit_roots.json new file mode 100644 index 0000000000..b04fbdf844 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test-vectors/exit_roots.json @@ -0,0 +1,14 @@ +{ + "global_exit_roots": [ + "0x207f0b7db488bbc423fc3d12db21b97e574453e12b49ca21205181af677d7b04", + "0x8e10e03b7db5ffe76edbea651052f8045289ece97947297de6279ce9f6730252" + ], + "mainnet_exit_roots": [ + "0x98c911b6dcface93fd0bb490d09390f2f7f9fcf36fc208cbb36528a229298326", + "0xbb71d991caf89fe64878259a61ae8d0b4310c176e66d90fd2370b02573e80c90" + ], + "rollup_exit_roots": [ + "0x6a2533a24cc2a3feecf5c09b6a270bbb24a5e2ce02c18c0e26cd54c3dddc2d70", + "0xd9b546933b59acd388dc0c6520cbf2d4dbb9bac66f74f167ba70f221d82a440c" + ] +} \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/leaf_value_vectors.json b/crates/miden-agglayer/solidity-compat/test-vectors/leaf_value_vectors.json new file mode 100644 index 0000000000..8d89835c88 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test-vectors/leaf_value_vectors.json @@ -0,0 +1,10 @@ +{ + "amount": 2000000000000000000, + "destination_address": "0xD9b20Fe633b609B01081aD0428e81f8Dd604F5C5", + "destination_network": 7, + "leaf_type": 0, + "leaf_value": "0xb67e42971034605367b7e92d1ad1d4648c3ffe0bea9b08115cd9aa2e616b2f88", + "metadata_hash": 
"0x6c7a91a5fb41dee8f0bc1c86b5587334583186f14acfa253e2f7c2833d1d6fdf", + "origin_network": 0, + "origin_token_address": "0xD9343a049D5DBd89CD19DC6BcA8c48fB3a0a42a7" +} \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/mmr_frontier_vectors.json b/crates/miden-agglayer/solidity-compat/test-vectors/mmr_frontier_vectors.json index e51ea4e4e9..df788f9498 100644 --- a/crates/miden-agglayer/solidity-compat/test-vectors/mmr_frontier_vectors.json +++ b/crates/miden-agglayer/solidity-compat/test-vectors/mmr_frontier_vectors.json @@ -1,4 +1,38 @@ { + "amounts": [ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32 + ], "counts": [ 1, 2, @@ -33,72 +67,144 @@ 31, 32 ], + "destination_addresses": [ + "0xB48074703337bEf6e94A9e2E1FfFe71632B42D56", + "0xBA60cd3cBD12619e6983B5D0E1CbcF2f4fed9d7b", + "0x89510362d6EdeB958F059727C9eD0F99298aAFa4", + "0xD62Cf6356E0a48e2014b71Cf942BEbBbFb00F7d7", + "0xFA5eacb9668731D74F2BB5Ad5bfB319f5A91c87D", + "0x90DD6647e5c91f9104a548876868a54795696B34", + "0x0E76F5f993A9a7f961e06397BC71d15c278A0b6c", + "0xe022226D1fFcCf12ac0e84D0aB9430F3fd56C613", + "0x1F9ecff77E28Bca8Ef18434B842A30579Bfd4EaA", + "0xe51D207B549Db157BeE9faeBd51C35aB47d180EF", + "0x9f30d6d0335E91e0593f13a567E4Fee661e1259F", + "0xE8F13Da1BDb719ba364a890a623454040A932eCf", + "0xb6EE19bf265563aA76dbe202e8dC71F8f42a58B1", + "0xf62d45e4D0DC57259B4557b5d79Ea23F67D0E381", + "0xaa94f5480aD0C906044E5E7Da8BB6BC4395aA498", + "0x060ddd9f6e6CF285004e33C30b46710ad75918Dd", + "0x8B743c166e1dA1444781AD2b5Fe2291578ABCeb1", + "0x8B08d9A773273Df976fb7448D38FeEeB15Dc34F8", + "0xbe931f6F189e6F8Da14f7B67Eb2E67b5D7f71c1d", + "0x2F891C182b23d1422D8Fddd9CC30B25BB849Bd5F", + "0x93fD7DEd75058ABA1B76C35c4Ac4e9355e596EdC", + "0x25B9eBC8D7d48a6B0e71e82Aa66832aCC9419E3A", + "0xbb086ECaC1316B81107e3CA591ef645831094E5a", + 
"0x08c7a5Db749DEf9280108Ec5e0354d4957CB17cF", + "0x0da76aA44116fad143F778f25907046E52F8c4d3", + "0xcFd0a3bfA35E771aad88C64EF0A310efF6730cDa", + "0xa7439b51638F31f054C93EC869C8c7E982699BAC", + "0x5C9A97f096CB18903994C44ddC07FfD921490B2c", + "0x0e52786aF0b48D764a255f6506C9C297d5BA2Dc3", + "0x5C2093921171F2c2d657eAA681D463Fe36c965d1", + "0xf8de801F1ba2a676d96Eb1F1ccB0B0CADFCbbE9e", + "0x31D230FAbAd05777Bb3E1a062e781446Bc422b80" + ], + "destination_networks": [ + 1538671592, + 1271685039, + 2812858243, + 1717044446, + 1618236512, + 1846799397, + 1114625417, + 1980472020, + 3445581035, + 1216050355, + 1334555263, + 1595653741, + 1406956437, + 2339872987, + 1591634953, + 2036330440, + 948554316, + 1629580568, + 4209912969, + 3528172732, + 4197496357, + 2020389543, + 1365501531, + 2591126838, + 273689805, + 543018504, + 3291055054, + 2685286074, + 3030491074, + 4166649488, + 1541470110, + 1181416010 + ], "leaves": [ - "0x0000000000000000000000000000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000000000000000000000000001", - "0x0000000000000000000000000000000000000000000000000000000000000002", - "0x0000000000000000000000000000000000000000000000000000000000000003", - "0x0000000000000000000000000000000000000000000000000000000000000004", - "0x0000000000000000000000000000000000000000000000000000000000000005", - "0x0000000000000000000000000000000000000000000000000000000000000006", - "0x0000000000000000000000000000000000000000000000000000000000000007", - "0x0000000000000000000000000000000000000000000000000000000000000008", - "0x0000000000000000000000000000000000000000000000000000000000000009", - "0x000000000000000000000000000000000000000000000000000000000000000a", - "0x000000000000000000000000000000000000000000000000000000000000000b", - "0x000000000000000000000000000000000000000000000000000000000000000c", - "0x000000000000000000000000000000000000000000000000000000000000000d", - 
"0x000000000000000000000000000000000000000000000000000000000000000e", - "0x000000000000000000000000000000000000000000000000000000000000000f", - "0x0000000000000000000000000000000000000000000000000000000000000010", - "0x0000000000000000000000000000000000000000000000000000000000000011", - "0x0000000000000000000000000000000000000000000000000000000000000012", - "0x0000000000000000000000000000000000000000000000000000000000000013", - "0x0000000000000000000000000000000000000000000000000000000000000014", - "0x0000000000000000000000000000000000000000000000000000000000000015", - "0x0000000000000000000000000000000000000000000000000000000000000016", - "0x0000000000000000000000000000000000000000000000000000000000000017", - "0x0000000000000000000000000000000000000000000000000000000000000018", - "0x0000000000000000000000000000000000000000000000000000000000000019", - "0x000000000000000000000000000000000000000000000000000000000000001a", - "0x000000000000000000000000000000000000000000000000000000000000001b", - "0x000000000000000000000000000000000000000000000000000000000000001c", - "0x000000000000000000000000000000000000000000000000000000000000001d", - "0x000000000000000000000000000000000000000000000000000000000000001e", - "0x000000000000000000000000000000000000000000000000000000000000001f" + "0x583cc77fc2b7280dae7433767e49a7c6d9a33f0410e179814f3aa1dbed9e5383", + "0x39b63728fe06dbc9e883852005cce44a1e6515ed55e8b1dbf3b6758179716f11", + "0xc4971cb8c3f11aedc287a9855739bb007822038c054cb6808e03131c9f91a0f1", + "0x629eb6a1e17d6aea8011061da05909d2f7312467aa8f32738861fb940b157174", + "0xf405ea66eca447509b4ffc555fb9dbcae535b11e55a4331d02819e0ca9984575", + "0xfaa2e8faa0081b6534e90f6fe58e9c5232afe98bcc9f1695e544e02f3569463d", + "0xb89ca15ba6ca7c7a208e24d7353ad31282ef134662f659fac32f27df2ae3320f", + "0x79bdf5742cc5cc4ef8f888a231e367c50b4430a9459541facb343111c92e6bf8", + "0x0822a3dc7f0c51e70dd73014e18df2981c4bde688eec541581558c3de0ad6f65", + 
"0xbd91e0fa090c5a988b4af55366454b0e66f565f313127d4775bb44e446baa917", + "0xfd3fe60322ffefecdd2e5b9b1ccc99f335dcb63c48bdd4c0694978ff64554abb", + "0xb8f5374c52ea2b64d00f566b798a42fabe4405817327b361cd2e57b17949917a", + "0xd84e6a9f537e1e71ef75ee1b4c9aecaa4f192b65fd3b2c5a276aa82193196c00", + "0xaa746c560d044f6c4ddab4a0553cde8fe6aa95478fe198bcb4b0b9ad3c3b92af", + "0xf9bf642edac2a5f80a899ab3a91aefb6d9afbaf107fa34557c9dc66c6bed4611", + "0xf3b649080b6f226a027260cef334003468ddd40f70f8268b8019613f30f31429", + "0x2e8c2d56396ac75cf085c44ad3939b83f15b3ef886092faaf26373f9083fc49b", + "0x74c483d10393a141f7d1a6d583c324e7b8293f4d8bfa612cfff0a51a8dcd1ddc", + "0x16693082ba7d19cf38216153780011320b4d22133bb541006542f8b24c0bac29", + "0x7132c9fbb1f7ee387c6cdf1ef1554eaa4b791f0de1c2e858a640f3c0e867b1be", + "0x7b0f681fc08c9193034a590e818206c8972887710115677df57113e9b40823cb", + "0xf9513376461d437192b658deaa647a8625e7354f4d59a778114552feaa8b2e70", + "0x4439d51fd28dc9016bbab806805aa36a53fb9a4f02c379b47656d2b4c45c7b39", + "0x3a44b8129f9468dc743ffd55d2cc0390ed565ebaf8955e38a4e8d41714f874c3", + "0xacfc9d4916104a4d0965d1caa24cdf31fa2cf65474f1986175f49ced505d7470", + "0x34a969176d30df0525c1eb1b349d3a24b1a684f5a6f4cf60797d5d213b7007a0", + "0xe16eb94a82b246fa6534867df6ee6217c8b1c850d835d72548d8f85d1504330d", + "0x188e9c5333cc6d9cd5f8c21a71917e22044b1dc6cdc3241ac9187ccd25598884", + "0x2ca6faff026b921ce865e1161688e7debc733c2d699937ce858783ffbca666d8", + "0x0c2819f9ad1daa7dc7a42c0b8c682091dd77c9aa78bbde349b40efd152843b2c", + "0x53c565760b2e54abcf98f888b83a1178a20f47db78aa048738217c0d3e59937b", + "0xc2668ecfa5198b70c0389bc5b71a70f1e2ffe0be832846e1889ad80ee3a8ef34" ], + "origin_token_address": "0x7a6fC3e8b57c6D1924F1A9d0E2b3c4D5e6F70891", "roots": [ - "0x27ae5ba08d7291c96c8cbddcc148bf48a6d68c7974b94356f53754ef6171d757", - "0x4a90a2c108a29b7755a0a915b9bb950233ce71f8a01859350d7b73cc56f57a62", - "0x2757cc260a62cc7c7708c387ea99f2a6bb5f034ed00da845734bec4d3fa3abfe", - 
"0xcb305ccda4331eb3fd9e17b81a5a0b336fb37a33f927698e9fb0604e534c6a01", - "0xa377a6262d3bae7be0ce09c2cc9f767b0f31848c268a4bdc12b63a451bb97281", - "0x440213f4dff167e3f5c655fbb6a3327af3512affed50ce3c1a3f139458a8a6d1", - "0xdd716d2905f2881005341ff1046ced5ee15cc63139716f56ed6be1d075c3f4a7", - "0xd6ebf96fcc3344fa755057b148162f95a93491bc6e8be756d06ec64df4df90fc", - "0x8b3bf2c95f3d0f941c109adfc3b652fadfeaf6f34be52524360a001cb151b5c9", - "0x74a5712654eccd015c44aca31817fd8bee8da400ada986a78384ef3594f2d459", - "0x95dd1209b92cce04311dfc8670b03428408c4ff62beb389e71847971f73702fa", - "0x0a83f3b2a75e19b7255b1de379ea9a71aef9716a3aef20a86abe625f088bbebf", - "0x601ba73b45858be76c8d02799fd70a5e1713e04031aa3be6746f95a17c343173", - "0x93d741c47aa73e36d3c7697758843d6af02b10ed38785f367d1602c8638adb0d", - "0x578f0d0a9b8ed5a4f86181b7e479da7ad72576ba7d3f36a1b72516aa0900c8ac", - "0x995c30e6b58c6e00e06faf4b5c94a21eb820b9db7ad30703f8e3370c2af10c11", - "0x49fb7257be1e954c377dc2557f5ca3f6fc7002d213f2772ab6899000e465236c", - "0x06fee72550896c50e28b894c60a3132bfe670e5c7a77ab4bb6a8ffb4abcf9446", - "0xbba3a807e79d33c6506cd5ecb5d50417360f8be58139f6dbe2f02c92e4d82491", - "0x1243fbd4d21287dbdaa542fa18a6a172b60d1af2c517b242914bdf8d82a98293", - "0x02b7b57e407fbccb506ed3199922d6d9bd0f703a1919d388c76867399ed44286", - "0xa15e7890d8f860a2ef391f9f58602dec7027c19e8f380980f140bbb92a3e00ba", - "0x2cb7eff4deb9bf6bbb906792bc152f1e63759b30e7829bfb5f3257ee600303f5", - "0xb1b034b4784411dc6858a0da771acef31be60216be0520a7950d29f66aee1fc5", - "0x3b17098f521ca0719e144a12bb79fdc51a3bc70385b5c2ee46b5762aae741f4f", - "0xd3e054489aa750d41938143011666a83e5e6b1477cce5ad612447059c2d8b939", - "0x6d15443ab2f39cce7fbe131843cdad6f27400eb179efb866569dd48baaf3ed4d", - "0xf9386ef40320c369185e48132f8fbf2f3e78d9598495dd342bcf4f41388d460d", - "0xb618ebe1f7675ef246a8cbb93519469076d5caacd4656330801537933e27b172", - "0x6c8c90b5aa967c98061a2dd09ea74dfb61fd9e86e308f14453e9e0ae991116de", - 
"0x06f51cfc733d71220d6e5b70a6b33a8d47a1ab55ac045fac75f26c762d7b29c9", - "0x82d1ddf8c6d986dee7fc6fa2d7120592d1dc5026b1bb349fcc9d5c73ac026f56" - ] + "0x8943af888cfe3ea3924601d71a6baacd7b87c826da39e9a682eb285ff5031c1b", + "0x805f8bbc68e3df18db265cfe1fa972faf9e29521978640b34450a8ea8bf7a665", + "0xa0da7520543874392c8332ea2d567ebb4b2b10f6897d34f5404263f0f97b1cb8", + "0x15fd7076633c5dc177f675b9bd39418043177d5cf565f3521a25c502c794f102", + "0x5f3b6ebb3858d5481a1ab0ebc7bf51e66d6dcdaafd861b9e6af088963a5a2282", + "0xce80b3372dd297c3d9cbdce7e0a3b7cf39dabfa03665dc1a2778955935fc06da", + "0xaca8bafe682b461769752c13e7313aa65b83ad1a0019f644a5bdf5e453e1274b", + "0x5453a77465da8cf9803ee6e1ff5960cf96ebd14a2e0cd4299995334ed73e802f", + "0x6bb045a5956579c11c97a673f87292085ba3addda57dcd8c40fbd4db63d8b07f", + "0x4bb0337c1f708b56efc1f0f4279d9ca9c94de2187c406c1619947158d83028ea", + "0x12fac4e9109f79710654946bd345128f8202e403a4ba3fed44efc8d5d0e1cd9e", + "0x2938ec7bc586f16a6cca58e1c3a4b060d135d954649a2abdec9094ebf212fadc", + "0x4d287ab8ab3e87b07528774b18fdb81511402f42d4482bdea43c0a1cbe161128", + "0xa801b801f5018d5e39d7c5aba92e4a4fce13845bab8bf51f198c8165aa20f67d", + "0x343379b96ae9187d4cb8f20706cf4e884f517ef02e3700a9f7500e32f3c14fda", + "0xb18e90e56bbca6ee8a7e653eafef633c01d4e41ef9fdbccfb99c50a5c3ae8f8f", + "0xe2d7f15c9ca938f88337cbcc534475eb6d625cce8fbd24f2389d4874cee21962", + "0x6bb8d1fc420da55e1f42becd9ddd6be0a2b184d7827a410e68314f50bdfc55d5", + "0x4379c4b7761be8fb99d8c33f075d6a8206a15017e7e9a9b41f66eefbac85e99e", + "0x225f99e77a68aad477ae85289bfcf54c86845ab6f5dcb0eea9cb97137a9de128", + "0xcb18011fca44a052414a2f86eb19c63a986868f7cef55f9d1f936e0fd8a1e18a", + "0xbac2a9dbeeeb688616c998fe977f0db04d6021b90fa4e7f0aa1347e8ae8eccef", + "0x94c4018c9810210df4a63db14f1949f6599da6f3c1760efcd4402388a8d9c3b6", + "0x21de8642d818c1ddb0d5f9b5c06a73c1db6c03a753828192a151c08a5e524c80", + "0xd1845fc44e07f7751ab65b05782f1179b5a9212a0f8e980e0e07b56da7663351", + 
"0xf861aa5ac7127d103e3174753736f3e3110f1317bc1f1c93d638b429ce8a3c9b", + "0xff8c1364e2ff988dbd8780352eeef599341bb010eb48af3019f8540b2b52b90e", + "0xf232b957fa2c9e83120050f8c4324247e12dfbd8f876880383a066f19d018ec6", + "0xd223658da6e25f5362c1abb49484414e4b9594ac7515e0b7d8aabd919866598c", + "0xcf038032ed455f73f04d503cf5796b196dea55d967ec6617f0a1a1623f144ebd", + "0xfa9da8a43eff2ac92f2c3996b2f5b18a92b95a56e61f26bb30ed47122cfc9e9f", + "0x1a17ad0ab073918397c17419deab441d666c0abdeb9f2104c47af4589dd4a2c3" + ], + "token_decimals": 8, + "token_name": "AGG", + "token_symbol": "AGG" } \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsLocalTx.t.sol b/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsLocalTx.t.sol new file mode 100644 index 0000000000..94abe9ae54 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsLocalTx.t.sol @@ -0,0 +1,198 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "forge-std/Test.sol"; +import "@agglayer/lib/GlobalExitRootLib.sol"; +import "@agglayer/interfaces/IBasePolygonZkEVMGlobalExitRoot.sol"; +import "./DepositContractTestHelpers.sol"; + +contract MockGlobalExitRootManagerLocal is IBasePolygonZkEVMGlobalExitRoot { + mapping(bytes32 => uint256) public override globalExitRootMap; + + function updateExitRoot(bytes32) external override {} + + function setGlobalExitRoot(bytes32 globalExitRoot) external { + globalExitRootMap[globalExitRoot] = block.number; + } +} + +/** + * @title ClaimAssetTestVectorsLocalTx + * @notice Test contract that generates test vectors for an L1 bridgeAsset transaction. + * This simulates calling bridgeAsset() on the PolygonZkEVMBridgeV2 contract + * and captures all relevant data including VALID Merkle proofs. + * Uses BridgeL2SovereignChain to get the authoritative claimedGlobalIndexHashChain. 
+ * + * Run with: forge test -vv --match-contract ClaimAssetTestVectorsLocalTx + * + * The output can be used to verify Miden's ability to process L1 bridge transactions. + */ +contract ClaimAssetTestVectorsLocalTx is Test, DepositContractTestHelpers { + /** + * @notice Generates bridge asset test vectors with VALID Merkle proofs. + * Simulates a user calling bridgeAsset() to bridge tokens from L1 to Miden. + * + * Output file: test-vectors/bridge_asset_vectors.json + */ + function test_generateClaimAssetVectorsLocalTx() public { + string memory obj = "root"; + + // ====== BRIDGE TRANSACTION PARAMETERS ====== + + uint8 leafType = 0; + uint32 originNetwork = 0; + address originTokenAddress = 0x2DC70fb75b88d2eB4715bc06E1595E6D97c34DFF; + uint32 destinationNetwork = 20; + address destinationAddress = 0x00000000AA0000000000bb000000cc000000Dd00; + uint256 amount = 100000000000000000000; + + bytes memory metadata = abi.encode("Test Token", "TEST", uint8(18)); + bytes32 metadataHash = keccak256(metadata); + + // ====== COMPUTE LEAF VALUE AND ADD TO TREE ====== + + bytes32 leafValue = getLeafValue( + leafType, originNetwork, originTokenAddress, destinationNetwork, destinationAddress, amount, metadataHash + ); + + // Add the leaf to the deposit tree to generate valid Merkle proof + _addLeaf(leafValue); + + // Get the deposit count (leaf index) - depositCount is uint256 in DepositContractBase + uint256 depositCountValue = uint256(depositCount); + + // Get the local exit root (root of the deposit tree) + bytes32 localExitRoot = getRoot(); + + // ====== GENERATE MERKLE PROOF ====== + + // Generate canonical zeros for the Merkle proof + bytes32[32] memory canonicalZeros = _computeCanonicalZeros(); + + // Build the Merkle proof from _branch array and canonical zeros + // The leaf index is depositCountValue - 1 (0-indexed) + uint256 leafIndex = depositCountValue - 1; + bytes32[32] memory smtProofLocal = _generateLocalProof(leafIndex, canonicalZeros); + + // For mainnet deposits, 
the rollup proof is all zeros + bytes32[32] memory smtProofRollup; + for (uint256 i = 0; i < 32; i++) { + smtProofRollup[i] = bytes32(0); + } + + // ====== COMPUTE EXIT ROOTS ====== + + // For a simulated L1 bridge transaction: + // - mainnetExitRoot is the local exit root from the deposit tree + // - rollupExitRoot is simulated (deterministic for reproducibility) + bytes32 mainnetExitRoot = localExitRoot; + bytes32 rollupExitRoot = keccak256(abi.encodePacked("rollup_exit_root_simulated")); + + // Compute global exit root + bytes32 globalExitRoot = GlobalExitRootLib.calculateGlobalExitRoot(mainnetExitRoot, rollupExitRoot); + + // ====== VERIFY MERKLE PROOF ====== + + // Verify that the generated proof is valid + require( + this.verifyMerkleProof(leafValue, smtProofLocal, uint32(leafIndex), mainnetExitRoot), + "Generated Merkle proof is invalid!" + ); + + // ====== COMPUTE GLOBAL INDEX ====== + + // Global index for mainnet deposits: (1 << 64) | leafIndex + // Note: leafIndex is 0-based (depositCount - 1), matching how the bridge contract + // extracts it via uint32(globalIndex) in _verifyLeaf() + uint256 globalIndex = (uint256(1) << 64) | uint256(leafIndex); + + // ====== COMPUTE CLAIMED GLOBAL INDEX HASH CHAIN ====== + // Use the actual BridgeL2SovereignChain to compute the authoritative value + + // Set up the global exit root manager + MockGlobalExitRootManagerLocal gerManager = new MockGlobalExitRootManagerLocal(); + gerManager.setGlobalExitRoot(globalExitRoot); + globalExitRootManager = IBasePolygonZkEVMGlobalExitRoot(address(gerManager)); + + // Use a non-zero network ID to match sovereign-chain requirements + networkID = 10; + + // Call _verifyLeafBridge to update claimedGlobalIndexHashChain + this.verifyLeafBridgeHarness( + smtProofLocal, + smtProofRollup, + globalIndex, + mainnetExitRoot, + rollupExitRoot, + leafType, + originNetwork, + originTokenAddress, + destinationNetwork, + destinationAddress, + amount, + metadataHash + ); + + // Read the updated 
claimedGlobalIndexHashChain + bytes32 claimedHashChain = claimedGlobalIndexHashChain; + + // ====== SERIALIZE SMT PROOFS ====== + _serializeProofs(obj, smtProofLocal, smtProofRollup); + + // Scoped block 2: Serialize transaction parameters + { + vm.serializeUint(obj, "leaf_type", leafType); + vm.serializeUint(obj, "origin_network", originNetwork); + vm.serializeAddress(obj, "origin_token_address", originTokenAddress); + vm.serializeUint(obj, "destination_network", destinationNetwork); + vm.serializeAddress(obj, "destination_address", destinationAddress); + vm.serializeUint(obj, "amount", amount); + vm.serializeBytes(obj, "metadata", metadata); + vm.serializeBytes32(obj, "metadata_hash", metadataHash); + vm.serializeBytes32(obj, "leaf_value", leafValue); + } + + // Scoped block 3: Serialize state, exit roots, and finalize + { + vm.serializeUint(obj, "deposit_count", depositCountValue); + vm.serializeBytes32(obj, "global_index", bytes32(globalIndex)); + vm.serializeBytes32(obj, "claimed_global_index_hash_chain", claimedHashChain); + vm.serializeBytes32(obj, "local_exit_root", localExitRoot); + vm.serializeBytes32(obj, "mainnet_exit_root", mainnetExitRoot); + vm.serializeBytes32(obj, "rollup_exit_root", rollupExitRoot); + vm.serializeBytes32(obj, "global_exit_root", globalExitRoot); + + string memory json = vm.serializeString( + obj, "description", "L1 bridgeAsset transaction test vectors with valid Merkle proofs" + ); + + string memory outputPath = "test-vectors/claim_asset_vectors_local_tx.json"; + vm.writeJson(json, outputPath); + + console.log("Generated claim asset local tx test vectors with valid Merkle proofs"); + console.log("Output file:", outputPath); + console.log("Leaf index:", leafIndex); + console.log("Deposit count:", depositCountValue); + } + } + + /** + * @notice Helper function to serialize SMT proofs (avoids stack too deep) + * @param obj The JSON object key + * @param smtProofLocal The local exit root proof + * @param smtProofRollup The rollup exit 
root proof + */ + function _serializeProofs(string memory obj, bytes32[32] memory smtProofLocal, bytes32[32] memory smtProofRollup) + internal + { + bytes32[] memory smtProofLocalDyn = new bytes32[](32); + bytes32[] memory smtProofRollupDyn = new bytes32[](32); + for (uint256 i = 0; i < 32; i++) { + smtProofLocalDyn[i] = smtProofLocal[i]; + smtProofRollupDyn[i] = smtProofRollup[i]; + } + + vm.serializeBytes32(obj, "smt_proof_local_exit_root", smtProofLocalDyn); + vm.serializeBytes32(obj, "smt_proof_rollup_exit_root", smtProofRollupDyn); + } +} diff --git a/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsRealTx.t.sol b/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsRealTx.t.sol new file mode 100644 index 0000000000..8674e6cb06 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsRealTx.t.sol @@ -0,0 +1,191 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "forge-std/Test.sol"; +import "@agglayer/lib/GlobalExitRootLib.sol"; +import "@agglayer/interfaces/IBasePolygonZkEVMGlobalExitRoot.sol"; +import "./DepositContractTestHelpers.sol"; + +contract MockGlobalExitRootManagerReal is IBasePolygonZkEVMGlobalExitRoot { + mapping(bytes32 => uint256) public override globalExitRootMap; + + function updateExitRoot(bytes32) external override {} + + function setGlobalExitRoot(bytes32 globalExitRoot) external { + globalExitRootMap[globalExitRoot] = block.number; + } +} + +/** + * @title ClaimAssetTestVectorsRealTx + * @notice Test contract that generates comprehensive test vectors for verifying + * compatibility between Solidity's claimAsset and Miden's implementation. + * Uses BridgeL2SovereignChain to get the authoritative claimedGlobalIndexHashChain. + * + * Generates vectors for both LeafData and ProofData from a real transaction. + * + * Run with: forge test -vv --match-contract ClaimAssetTestVectorsRealTx + * + * The output can be compared against the Rust ClaimNoteStorage implementation. 
+ */ +contract ClaimAssetTestVectorsRealTx is Test, DepositContractTestHelpers { + /** + * @notice Generates claim asset test vectors from real Katana transaction and saves to JSON. + * Uses real transaction data from Katana explorer: + * https://katanascan.com/tx/0x685f6437c4a54f5d6c59ea33de74fe51bc2401fea65dc3d72a976def859309bf + * + * Output file: test-vectors/claim_asset_vectors.json + */ + function test_generateClaimAssetVectors() public { + string memory obj = "root"; + + // ====== PROOF DATA ====== + bytes32[32] memory smtProofLocalExitRoot; + bytes32[32] memory smtProofRollupExitRoot; + uint256 globalIndex; + bytes32 mainnetExitRoot; + bytes32 rollupExitRoot; + bytes32 globalExitRoot; + + // Scoped block keeps stack usage under Solidity limits. + { + // SMT proof for local exit root (32 nodes) + smtProofLocalExitRoot = [ + bytes32(0x0000000000000000000000000000000000000000000000000000000000000000), + bytes32(0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5), + bytes32(0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30), + bytes32(0xe37d456460231cf80063f57ee83a02f70d810c568b3bfb71156d52445f7a885a), + bytes32(0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344), + bytes32(0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d), + bytes32(0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968), + bytes32(0x3236bf576fca1adf85917ec7888c4b89cce988564b6028f7d66807763aaa7b04), + bytes32(0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af), + bytes32(0x054ba828046324ff4794fce22adefb23b3ce749cd4df75ade2dc9f41dd327c31), + bytes32(0x4e9220076c344bf223c7e7cb2d47c9f0096c48def6a9056e41568de4f01d2716), + bytes32(0xca6369acd49a7515892f5936227037cc978a75853409b20f1145f1d44ceb7622), + bytes32(0x5a925caf7bfdf31344037ba5b42657130d049f7cb9e87877317e79fce2543a0c), + bytes32(0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb), + 
bytes32(0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc), + bytes32(0x4111a1a05cc06ad682bb0f213170d7d57049920d20fc4e0f7556a21b283a7e2a), + bytes32(0x77a0f8b0e0b4e5a57f5e381b3892bb41a0bcdbfdf3c7d591fae02081159b594d), + bytes32(0x361122b4b1d18ab577f2aeb6632c690713456a66a5670649ceb2c0a31e43ab46), + bytes32(0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0), + bytes32(0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0), + bytes32(0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2), + bytes32(0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9), + bytes32(0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377), + bytes32(0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652), + bytes32(0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef), + bytes32(0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d), + bytes32(0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0), + bytes32(0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e), + bytes32(0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e), + bytes32(0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322), + bytes32(0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735), + bytes32(0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9) + ]; + + // SMT proof for rollup exit root (32 nodes - all zeros for this rollup claim). + for (uint256 i = 0; i < 32; i++) { + smtProofRollupExitRoot[i] = bytes32(0); + } + + // Global index (uint256) - encodes rollup_id and deposit_count. 
+ globalIndex = 18446744073709788808; + + // Exit roots + mainnetExitRoot = 0x31d3268d3a0145d65482b336935fa07dab0822f7dccd865f361d2bf122c4905c; + rollupExitRoot = 0x8452a95fd710163c5fa8ca2b2fe720d8781f0222bb9e82c2a442ec986c374858; + + // Compute global exit root: keccak256(mainnetExitRoot || rollupExitRoot) + globalExitRoot = GlobalExitRootLib.calculateGlobalExitRoot(mainnetExitRoot, rollupExitRoot); + + // forge-std JSON serialization supports `bytes32[]` but not `bytes32[32]`. + bytes32[] memory smtProofLocalExitRootDyn = new bytes32[](32); + bytes32[] memory smtProofRollupExitRootDyn = new bytes32[](32); + for (uint256 i = 0; i < 32; i++) { + smtProofLocalExitRootDyn[i] = smtProofLocalExitRoot[i]; + smtProofRollupExitRootDyn[i] = smtProofRollupExitRoot[i]; + } + + vm.serializeBytes32(obj, "smt_proof_local_exit_root", smtProofLocalExitRootDyn); + vm.serializeBytes32(obj, "smt_proof_rollup_exit_root", smtProofRollupExitRootDyn); + vm.serializeBytes32(obj, "global_index", bytes32(globalIndex)); + vm.serializeBytes32(obj, "mainnet_exit_root", mainnetExitRoot); + vm.serializeBytes32(obj, "rollup_exit_root", rollupExitRoot); + vm.serializeBytes32(obj, "global_exit_root", globalExitRoot); + } + + // ====== LEAF DATA ====== + // Scoped block keeps stack usage under Solidity limits. 
+ { + uint8 leafType = 0; // 0 for ERC20/ETH transfer + uint32 originNetwork = 0; + address originTokenAddress = 0x2DC70fb75b88d2eB4715bc06E1595E6D97c34DFF; + uint32 destinationNetwork = 20; + address destinationAddress = 0x00000000b0E79c68cafC54802726C6F102Cca300; + uint256 amount = 100000000000000; // 1e14 (0.0001 vbETH) + + // Original metadata from the transaction (ABI encoded: name, symbol, decimals) + // name = "Vault Bridge ETH", symbol = "vbETH", decimals = 18 + bytes memory metadata = + hex"000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000105661756c7420427269646765204554480000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000057662455448000000000000000000000000000000000000000000000000000000"; + bytes32 metadataHash = keccak256(metadata); + + // Compute the leaf value using the official DepositContractV2 implementation + bytes32 leafValue = getLeafValue( + leafType, + originNetwork, + originTokenAddress, + destinationNetwork, + destinationAddress, + amount, + metadataHash + ); + + // ====== COMPUTE CLAIMED GLOBAL INDEX HASH CHAIN ====== + // Use the actual BridgeL2SovereignChain to compute the authoritative value + + // Set up the global exit root manager + MockGlobalExitRootManagerReal gerManager = new MockGlobalExitRootManagerReal(); + gerManager.setGlobalExitRoot(globalExitRoot); + globalExitRootManager = IBasePolygonZkEVMGlobalExitRoot(address(gerManager)); + + // Use a non-zero network ID to match sovereign-chain requirements + networkID = 10; + + // Call _verifyLeafBridge to update claimedGlobalIndexHashChain + this.verifyLeafBridgeHarness( + smtProofLocalExitRoot, + smtProofRollupExitRoot, + globalIndex, + mainnetExitRoot, + rollupExitRoot, + leafType, + originNetwork, + originTokenAddress, + 
destinationNetwork, + destinationAddress, + amount, + metadataHash + ); + + // Read the updated claimedGlobalIndexHashChain + bytes32 claimedHashChain = claimedGlobalIndexHashChain; + + vm.serializeUint(obj, "leaf_type", leafType); + vm.serializeUint(obj, "origin_network", originNetwork); + vm.serializeAddress(obj, "origin_token_address", originTokenAddress); + vm.serializeUint(obj, "destination_network", destinationNetwork); + vm.serializeAddress(obj, "destination_address", destinationAddress); + vm.serializeUint(obj, "amount", amount); + vm.serializeBytes32(obj, "metadata_hash", metadataHash); + vm.serializeBytes32(obj, "leaf_value", leafValue); + string memory json = vm.serializeBytes32(obj, "claimed_global_index_hash_chain", claimedHashChain); + + // Save to file + string memory outputPath = "test-vectors/claim_asset_vectors_real_tx.json"; + vm.writeJson(json, outputPath); + } + } +} diff --git a/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsRollupTx.t.sol b/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsRollupTx.t.sol new file mode 100644 index 0000000000..edc867f29d --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsRollupTx.t.sol @@ -0,0 +1,229 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "forge-std/Test.sol"; +import "@agglayer/v2/lib/DepositContractV2.sol"; +import "@agglayer/lib/GlobalExitRootLib.sol"; +import "@agglayer/interfaces/IBasePolygonZkEVMGlobalExitRoot.sol"; +import "./DepositContractTestHelpers.sol"; + +contract MockGlobalExitRootManagerRollup is IBasePolygonZkEVMGlobalExitRoot { + mapping(bytes32 => uint256) public override globalExitRootMap; + + function updateExitRoot(bytes32) external override {} + + function setGlobalExitRoot(bytes32 globalExitRoot) external { + globalExitRootMap[globalExitRoot] = block.number; + } +} + +/** + * @title RollupExitTree + * @notice Simulates the rollup exit tree from PolygonRollupManager. 
+ * Each registered rollup has a fixed slot (rollupID - 1). The tree is a depth-32 + * Merkle tree where unregistered positions contain zero leaves. + * See PolygonRollupManager.getRollupExitRoot() for the production implementation. + */ +contract RollupExitTree is DepositContractBase, DepositContractTestHelpers { + /// @notice Place a local exit root at a specific rollup index (= rollupID - 1). + /// Earlier positions are filled with zero leaves (unregistered rollups). + function setLocalExitRootAt(bytes32 localExitRoot, uint256 rollupIndex) external { + for (uint256 i = 0; i < rollupIndex; i++) { + _addLeaf(bytes32(0)); + } + _addLeaf(localExitRoot); + } + + function generateProof(uint256 leafIndex) external view returns (bytes32[32] memory) { + bytes32[32] memory canonicalZeros = _computeCanonicalZeros(); + return _generateLocalProof(leafIndex, canonicalZeros); + } +} + +/** + * @title ClaimAssetTestVectorsRollupTx + * @notice Test contract that generates test vectors for a rollup deposit (mainnet_flag=0). + * This simulates a deposit on a rollup chain whose local exit root is then included + * in the rollup exit tree, requiring two-level Merkle proof verification. + * + * Uses non-zero leafIndex and indexRollup to exercise byte-ordering paths. + * + * Run with: forge test -vv --match-contract ClaimAssetTestVectorsRollupTx + * + * The output can be used to verify Miden's ability to process rollup bridge transactions. + */ +contract ClaimAssetTestVectorsRollupTx is Test, DepositContractV2, DepositContractTestHelpers { + /** + * @notice Generates rollup deposit test vectors with valid two-level Merkle proofs. 
+ * + * Output file: test-vectors/claim_asset_vectors_rollup_tx.json + */ + function test_generateClaimAssetVectorsRollupTx() public { + string memory obj = "root"; + + // ====== BRIDGE TRANSACTION PARAMETERS ====== + + uint8 leafType = 0; + uint32 originNetwork = 3; // rollup network ID + address originTokenAddress = 0x2DC70fb75b88d2eB4715bc06E1595E6D97c34DFF; + uint32 destinationNetwork = 20; + // Destination address with zero MSB (embeds a Miden AccountId) + address destinationAddress = 0x00000000AA0000000000bb000000cc000000Dd00; + uint256 amount = 100000000000000000000; + + bytes memory metadata = abi.encode("Test Token", "TEST", uint8(18)); + bytes32 metadataHash = keccak256(metadata); + + // ====== STEP 1: BUILD THE ROLLUP'S LOCAL EXIT TREE ====== + // Add dummy leaves before the target to get a non-zero leafIndex, + // exercising byte-swap paths in the MASM verification. + + bytes32 leafValue = getLeafValue( + leafType, originNetwork, originTokenAddress, destinationNetwork, destinationAddress, amount, metadataHash + ); + + // Add 2 dummy deposits before the real one -> leafIndex = 2 + _addLeaf(keccak256("dummy_deposit_0")); + _addLeaf(keccak256("dummy_deposit_1")); + _addLeaf(leafValue); + + uint256 leafIndex = depositCount - 1; // = 2 + bytes32 localExitRoot = getRoot(); + + // Generate the local exit root proof (leaf -> localExitRoot) + bytes32[32] memory canonicalZeros = _computeCanonicalZeros(); + bytes32[32] memory smtProofLocal = _generateLocalProof(leafIndex, canonicalZeros); + + // Verify local proof is valid + require( + this.verifyMerkleProof(leafValue, smtProofLocal, uint32(leafIndex), localExitRoot), + "Local Merkle proof is invalid!" + ); + + // ====== STEP 2: BUILD THE ROLLUP EXIT TREE ====== + // The rollup exit tree is a sparse Merkle tree where each rollup has a fixed slot + // at position (rollupID - 1). We place our localExitRoot at indexRollup = 5 + // (simulating rollupID = 6, with 5 earlier rollups having no bridge activity). 
+ + RollupExitTree rollupTree = new RollupExitTree(); + + uint256 indexRollup = 5; + rollupTree.setLocalExitRootAt(localExitRoot, indexRollup); + + bytes32 rollupExitRoot = rollupTree.getRoot(); + + // Generate the rollup exit root proof (localExitRoot -> rollupExitRoot) + bytes32[32] memory smtProofRollup = rollupTree.generateProof(indexRollup); + + // Verify rollup proof is valid + require( + rollupTree.verifyMerkleProof(localExitRoot, smtProofRollup, uint32(indexRollup), rollupExitRoot), + "Rollup Merkle proof is invalid!" + ); + + // ====== STEP 3: VERIFY TWO-LEVEL PROOF (matching Solidity _verifyLeaf rollup path) ====== + // For rollup deposits, verification is: + // 1. calculateRoot(leafValue, smtProofLocal, leafIndex) == localExitRoot + // 2. verifyMerkleProof(localExitRoot, smtProofRollup, indexRollup, rollupExitRoot) + + bytes32 computedLocalRoot = this.calculateRoot(leafValue, smtProofLocal, uint32(leafIndex)); + require(computedLocalRoot == localExitRoot, "Two-level step 1 failed: computed local root mismatch"); + require( + this.verifyMerkleProof(computedLocalRoot, smtProofRollup, uint32(indexRollup), rollupExitRoot), + "Two-level step 2 failed: rollup proof verification failed" + ); + + // ====== STEP 4: COMPUTE EXIT ROOTS AND GLOBAL INDEX ====== + + // For a rollup deposit, mainnetExitRoot is arbitrary (simulated) + bytes32 mainnetExitRoot = keccak256(abi.encodePacked("mainnet_exit_root_simulated")); + + // Compute global exit root + bytes32 globalExitRoot = GlobalExitRootLib.calculateGlobalExitRoot(mainnetExitRoot, rollupExitRoot); + + // Global index for rollup deposits: (indexRollup << 32) | leafIndex (no mainnet flag bit) + uint256 globalIndex = (uint256(indexRollup) << 32) | uint256(leafIndex); + + // ====== STEP 5: COMPUTE CLAIMED GLOBAL INDEX HASH CHAIN ====== + // Use the actual BridgeL2SovereignChain to compute the authoritative value. 
+ + MockGlobalExitRootManagerRollup gerManager = new MockGlobalExitRootManagerRollup(); + gerManager.setGlobalExitRoot(globalExitRoot); + globalExitRootManager = IBasePolygonZkEVMGlobalExitRoot(address(gerManager)); + + // Use a non-zero network ID to match sovereign-chain requirements + networkID = 10; + + // Call _verifyLeafBridge to update claimedGlobalIndexHashChain + this.verifyLeafBridgeHarness( + smtProofLocal, + smtProofRollup, + globalIndex, + mainnetExitRoot, + rollupExitRoot, + leafType, + originNetwork, + originTokenAddress, + destinationNetwork, + destinationAddress, + amount, + metadataHash + ); + + bytes32 claimedHashChain = claimedGlobalIndexHashChain; + + // ====== SERIALIZE TO JSON ====== + _serializeProofs(obj, smtProofLocal, smtProofRollup); + + { + vm.serializeUint(obj, "leaf_type", leafType); + vm.serializeUint(obj, "origin_network", originNetwork); + vm.serializeAddress(obj, "origin_token_address", originTokenAddress); + vm.serializeUint(obj, "destination_network", destinationNetwork); + vm.serializeAddress(obj, "destination_address", destinationAddress); + vm.serializeUint(obj, "amount", amount); + vm.serializeBytes(obj, "metadata", metadata); + vm.serializeBytes32(obj, "metadata_hash", metadataHash); + vm.serializeBytes32(obj, "leaf_value", leafValue); + } + + { + vm.serializeUint(obj, "deposit_count", uint256(depositCount)); + vm.serializeBytes32(obj, "global_index", bytes32(globalIndex)); + vm.serializeBytes32(obj, "claimed_global_index_hash_chain", claimedHashChain); + vm.serializeBytes32(obj, "local_exit_root", localExitRoot); + vm.serializeBytes32(obj, "mainnet_exit_root", mainnetExitRoot); + vm.serializeBytes32(obj, "rollup_exit_root", rollupExitRoot); + vm.serializeBytes32(obj, "global_exit_root", globalExitRoot); + + string memory json = vm.serializeString( + obj, "description", "Rollup deposit test vectors with valid two-level Merkle proofs (non-zero indices)" + ); + + string memory outputPath = 
"test-vectors/claim_asset_vectors_rollup_tx.json"; + vm.writeJson(json, outputPath); + + console.log("Generated rollup deposit test vectors with valid two-level Merkle proofs"); + console.log("Output file:", outputPath); + console.log("Leaf index:", leafIndex); + console.log("Rollup index:", indexRollup); + } + } + + /** + * @notice Helper function to serialize SMT proofs (avoids stack too deep) + */ + function _serializeProofs(string memory obj, bytes32[32] memory smtProofLocal, bytes32[32] memory smtProofRollup) + internal + { + bytes32[] memory smtProofLocalDyn = new bytes32[](32); + bytes32[] memory smtProofRollupDyn = new bytes32[](32); + for (uint256 i = 0; i < 32; i++) { + smtProofLocalDyn[i] = smtProofLocal[i]; + smtProofRollupDyn[i] = smtProofRollup[i]; + } + + vm.serializeBytes32(obj, "smt_proof_local_exit_root", smtProofLocalDyn); + vm.serializeBytes32(obj, "smt_proof_rollup_exit_root", smtProofRollupDyn); + } +} diff --git a/crates/miden-agglayer/solidity-compat/test/DepositContractTestHelpers.sol b/crates/miden-agglayer/solidity-compat/test/DepositContractTestHelpers.sol new file mode 100644 index 0000000000..91fd98db5c --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/DepositContractTestHelpers.sol @@ -0,0 +1,80 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "@agglayer/v2/sovereignChains/BridgeL2SovereignChain.sol"; + +/** + * @title DepositContractTestHelpers + * @notice Shared helpers for Sparse Merkle Tree and claim-asset vector generation. + * Inherited by SMTMerkleProofVectors and ClaimAssetTestVectors*. + */ +abstract contract DepositContractTestHelpers is BridgeL2SovereignChain { + /** + * @notice Computes the canonical zero hashes for the Sparse Merkle Tree. 
+ * @dev Each level i has zero hash: keccak256(zero[i-1], zero[i-1]) + * @return canonicalZeros Array of 32 zero hashes, one per tree level + */ + function _computeCanonicalZeros() internal pure returns (bytes32[32] memory canonicalZeros) { + bytes32 current = bytes32(0); + for (uint256 i = 0; i < 32; i++) { + canonicalZeros[i] = current; + current = keccak256(abi.encodePacked(current, current)); + } + } + + /** + * @notice Generates the SMT proof for a given leaf index using the current _branch state. + * @dev For each level i: + * - If bit i of leafIndex is 1: use _branch[i] (sibling on left) + * - If bit i of leafIndex is 0: use canonicalZeros[i] (sibling on right) + * @param leafIndex The 0-indexed position of the leaf in the tree + * @param canonicalZeros The precomputed canonical zero hashes + * @return smtProof The 32-element Merkle proof array + */ + function _generateLocalProof(uint256 leafIndex, bytes32[32] memory canonicalZeros) + internal + view + returns (bytes32[32] memory smtProof) + { + for (uint256 i = 0; i < 32; i++) { + if ((leafIndex >> i) & 1 == 1) { + smtProof[i] = _branch[i]; + } else { + smtProof[i] = canonicalZeros[i]; + } + } + } + + /** + * @notice Harness function to call _verifyLeafBridge externally + */ + function verifyLeafBridgeHarness( + bytes32[32] calldata smtProofLocalExitRoot, + bytes32[32] calldata smtProofRollupExitRoot, + uint256 globalIndex, + bytes32 mainnetExitRoot, + bytes32 rollupExitRoot, + uint8 leafType, + uint32 originNetwork, + address originTokenAddress, + uint32 destinationNetwork, + address destinationAddress, + uint256 amount, + bytes32 metadataHash + ) external { + _verifyLeafBridge( + smtProofLocalExitRoot, + smtProofRollupExitRoot, + globalIndex, + mainnetExitRoot, + rollupExitRoot, + leafType, + originNetwork, + originTokenAddress, + destinationNetwork, + destinationAddress, + amount, + metadataHash + ); + } +} diff --git a/crates/miden-agglayer/solidity-compat/test/ExitRoots.t.sol 
b/crates/miden-agglayer/solidity-compat/test/ExitRoots.t.sol new file mode 100644 index 0000000000..b518e5de15 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/ExitRoots.t.sol @@ -0,0 +1,57 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "forge-std/Test.sol"; +import "@agglayer/lib/GlobalExitRootLib.sol"; + +/** + * @title ExitRootsTestVectors + * @notice Test contract that generates global exit root test vectors from + * mainnet-rollup exit root pairs. + * + * Run with: forge test -vv --match-contract ExitRootsTestVectors + * + * The output can be compared against Rust implementations that compute + * the global exit root as keccak256(mainnetExitRoot || rollupExitRoot). + */ +contract ExitRootsTestVectors is Test { + /** + * @notice Generates global exit root vectors from mainnet-rollup pairs + * and saves to JSON file. + * + * Output file: test-vectors/exit_roots.json + */ + function test_generateExitRootVectors() public { + // Input: pairs of (mainnetExitRoot, rollupExitRoot) from mainnet transactions + // Source transaction hashes from https://explorer.lumia.org/: + // TX 1: 0xe1a20811d757c48eba534f63041f58cd39eec762bfb6e4496dccf4e675fd5619 + // TX 2: 0xe64254ff002b3d46b46af077fa24c6ef5b54d950759d70d6d9a693b1d36de188 + bytes32[] memory mainnetExitRoots = new bytes32[](2); + bytes32[] memory rollupExitRoots = new bytes32[](2); + + // Pair 1 (TX: 0xe1a20811d757c48eba534f63041f58cd39eec762bfb6e4496dccf4e675fd5619) + mainnetExitRoots[0] = bytes32(0x98c911b6dcface93fd0bb490d09390f2f7f9fcf36fc208cbb36528a229298326); + rollupExitRoots[0] = bytes32(0x6a2533a24cc2a3feecf5c09b6a270bbb24a5e2ce02c18c0e26cd54c3dddc2d70); + + // Pair 2 (TX: 0xe64254ff002b3d46b46af077fa24c6ef5b54d950759d70d6d9a693b1d36de188) + mainnetExitRoots[1] = bytes32(0xbb71d991caf89fe64878259a61ae8d0b4310c176e66d90fd2370b02573e80c90); + rollupExitRoots[1] = bytes32(0xd9b546933b59acd388dc0c6520cbf2d4dbb9bac66f74f167ba70f221d82a440c); + + // Compute global exit roots 
+ bytes32[] memory globalExitRoots = new bytes32[](mainnetExitRoots.length); + for (uint256 i = 0; i < mainnetExitRoots.length; i++) { + globalExitRoots[i] = GlobalExitRootLib.calculateGlobalExitRoot(mainnetExitRoots[i], rollupExitRoots[i]); + } + + // Serialize parallel arrays to JSON + string memory obj = "root"; + vm.serializeBytes32(obj, "mainnet_exit_roots", mainnetExitRoots); + vm.serializeBytes32(obj, "rollup_exit_roots", rollupExitRoots); + string memory json = vm.serializeBytes32(obj, "global_exit_roots", globalExitRoots); + + // Save to file + string memory outputPath = "test-vectors/exit_roots.json"; + vm.writeJson(json, outputPath); + console.log("Saved exit root vectors to:", outputPath); + } +} diff --git a/crates/miden-agglayer/solidity-compat/test/LeafValueTestVectors.t.sol b/crates/miden-agglayer/solidity-compat/test/LeafValueTestVectors.t.sol new file mode 100644 index 0000000000..3d39576a02 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/LeafValueTestVectors.t.sol @@ -0,0 +1,58 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "forge-std/Test.sol"; +import "@agglayer/v2/lib/DepositContractV2.sol"; + +/** + * @title LeafValueTestVectors + * @notice Test contract that generates test vectors for verifying compatibility + * between Solidity's getLeafValue and Miden's keccak hash implementation. + * + * Run with: forge test -vv --match-contract LeafValueTestVectors + * + * The output can be compared against the Rust get_leaf_value implementation. + */ +contract LeafValueTestVectors is Test, DepositContractV2 { + /** + * @notice Generates leaf value test vectors and saves to JSON file. 
+ * Uses real transaction data from Lumia explorer: + * https://explorer.lumia.org/tx/0xe64254ff002b3d46b46af077fa24c6ef5b54d950759d70d6d9a693b1d36de188 + * + * Output file: test-vectors/leaf_value_vectors.json + */ + function test_generateLeafValueVectors() public { + // Test vector from real Lumia bridge transaction + uint8 leafType = 0; // 0 for ERC20/ETH transfer + uint32 originNetwork = 0; + address originTokenAddress = 0xD9343a049D5DBd89CD19DC6BcA8c48fB3a0a42a7; + uint32 destinationNetwork = 7; + address destinationAddress = 0xD9b20Fe633b609B01081aD0428e81f8Dd604F5C5; + uint256 amount = 2000000000000000000; // 2e18 + + // Original metadata from the transaction (ABI encoded: name, symbol, decimals) + bytes memory metadata = + hex"000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000b4c756d696120546f6b656e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000054c554d4941000000000000000000000000000000000000000000000000000000"; + bytes32 metadataHash = keccak256(metadata); + + // Compute the leaf value using the official DepositContractV2 implementation + bytes32 leafValue = getLeafValue( + leafType, originNetwork, originTokenAddress, destinationNetwork, destinationAddress, amount, metadataHash + ); + + // Serialize to JSON + string memory obj = "root"; + vm.serializeUint(obj, "leaf_type", leafType); + vm.serializeUint(obj, "origin_network", originNetwork); + vm.serializeAddress(obj, "origin_token_address", originTokenAddress); + vm.serializeUint(obj, "destination_network", destinationNetwork); + vm.serializeAddress(obj, "destination_address", destinationAddress); + vm.serializeUint(obj, "amount", amount); + vm.serializeBytes32(obj, "metadata_hash", metadataHash); + string memory json = vm.serializeBytes32(obj, 
"leaf_value", leafValue); + + // Save to file + string memory outputPath = "test-vectors/leaf_value_vectors.json"; + vm.writeJson(json, outputPath); + } +} diff --git a/crates/miden-agglayer/solidity-compat/test/MMRTestVectors.t.sol b/crates/miden-agglayer/solidity-compat/test/MMRTestVectors.t.sol index 2e5b016232..977dddbc89 100644 --- a/crates/miden-agglayer/solidity-compat/test/MMRTestVectors.t.sol +++ b/crates/miden-agglayer/solidity-compat/test/MMRTestVectors.t.sol @@ -2,20 +2,62 @@ pragma solidity ^0.8.20; import "forge-std/Test.sol"; -import "@agglayer/v2/lib/DepositContractBase.sol"; +import "@agglayer/v2/lib/DepositContractV2.sol"; /** * @title MMRTestVectors * @notice Test contract that generates test vectors for verifying compatibility * between Solidity's DepositContractBase and Miden's MMR Frontier implementation. - * + * + * Leaves are constructed via getLeafValue using the same hardcoded fields that + * bridge_out.masm uses (leafType=0, originNetwork=64, originTokenAddress=fixed random value, + * metadataHash=0), parametrised by amount (i+1) and deterministic per-leaf + * destination network/address values derived from a fixed seed. 
+ * * Run with: forge test -vv --match-contract MMRTestVectors - * + * * The output can be compared against the Rust KeccakMmrFrontier32 implementation * in crates/miden-testing/tests/agglayer/mmr_frontier.rs */ -contract MMRTestVectors is Test, DepositContractBase { - +contract MMRTestVectors is Test, DepositContractV2 { + // Constants matching bridge_out.masm hardcoded values + uint8 constant LEAF_TYPE = 0; + uint32 constant ORIGIN_NETWORK = 64; + address constant ORIGIN_TOKEN_ADDR = 0x7a6fC3e8b57c6D1924F1A9d0E2b3c4D5e6F70891; + + // Token metadata (single source of truth for Rust tests) + string constant TOKEN_NAME = "AGG"; + string constant TOKEN_SYMBOL = "AGG"; + uint8 constant TOKEN_DECIMALS = 8; + + bytes32 constant METADATA_HASH = keccak256(abi.encode(TOKEN_NAME, TOKEN_SYMBOL, TOKEN_DECIMALS)); + + // Fixed seed for deterministic "random" destination vectors. + // Keeping this constant ensures everyone regenerates the exact same JSON vectors. + uint256 constant VECTOR_SEED = uint256(keccak256("agglayer::mmr_frontier_vectors::v2")); + + /** + * @notice Builds a leaf hash identical to what bridge_out.masm would produce for the + * given amount. + */ + function _createLeaf(uint256 amount, uint32 destinationNetwork, address destinationAddress) + internal + pure + returns (bytes32) + { + return getLeafValue( + LEAF_TYPE, ORIGIN_NETWORK, ORIGIN_TOKEN_ADDR, destinationNetwork, destinationAddress, amount, METADATA_HASH + ); + } + + function _destinationNetworkAt(uint256 idx) internal pure returns (uint32) { + return uint32(uint256(keccak256(abi.encodePacked(VECTOR_SEED, bytes1(0x01), idx)))); + } + + function _destinationAddressAt(uint256 idx) internal pure returns (address) { + return address(uint160(uint256(keccak256(abi.encodePacked(VECTOR_SEED, bytes1(0x02), idx))))); + } + /** * @notice Generates the canonical zeros and saves to JSON file. 
* ZERO_0 = 0x0...0 (32 zero bytes) @@ -25,7 +67,7 @@ contract MMRTestVectors is Test, DepositContractBase { */ function test_generateCanonicalZeros() public { bytes32[] memory zeros = new bytes32[](32); - + bytes32 z = bytes32(0); for (uint256 i = 0; i < 32; i++) { zeros[i] = z; @@ -34,37 +76,59 @@ contract MMRTestVectors is Test, DepositContractBase { // Foundry serializes bytes32[] to a JSON array automatically string memory json = vm.serializeBytes32("root", "canonical_zeros", zeros); - + // Save to file string memory outputPath = "test-vectors/canonical_zeros.json"; vm.writeJson(json, outputPath); console.log("Saved canonical zeros to:", outputPath); } - + /** * @notice Generates MMR frontier vectors (leaf-root pairs) and saves to JSON file. - * Uses parallel arrays instead of array of objects for cleaner serialization. + * Each leaf is created via _createLeaf(i+1, network[i], address[i]) so that: + * - amounts are 1..32 + * - destination networks/addresses are deterministic per index from VECTOR_SEED + * + * The destination vectors are also written to JSON so the Rust bridge_out test + * can construct matching B2AGG notes. 
+ * * Output file: test-vectors/mmr_frontier_vectors.json */ function test_generateVectors() public { bytes32[] memory leaves = new bytes32[](32); bytes32[] memory roots = new bytes32[](32); uint256[] memory counts = new uint256[](32); + uint256[] memory amounts = new uint256[](32); + uint256[] memory destinationNetworks = new uint256[](32); + address[] memory destinationAddresses = new address[](32); for (uint256 i = 0; i < 32; i++) { - bytes32 leaf = bytes32(i); + uint256 amount = i + 1; + uint32 destinationNetwork = _destinationNetworkAt(i); + address destinationAddress = _destinationAddressAt(i); + bytes32 leaf = _createLeaf(amount, destinationNetwork, destinationAddress); _addLeaf(leaf); leaves[i] = leaf; roots[i] = getRoot(); counts[i] = depositCount; + amounts[i] = amount; + destinationNetworks[i] = destinationNetwork; + destinationAddresses[i] = destinationAddress; } // Serialize parallel arrays to JSON string memory obj = "root"; vm.serializeBytes32(obj, "leaves", leaves); vm.serializeBytes32(obj, "roots", roots); - string memory json = vm.serializeUint(obj, "counts", counts); + vm.serializeUint(obj, "counts", counts); + vm.serializeUint(obj, "amounts", amounts); + vm.serializeUint(obj, "destination_networks", destinationNetworks); + vm.serializeAddress(obj, "origin_token_address", ORIGIN_TOKEN_ADDR); + vm.serializeString(obj, "token_name", TOKEN_NAME); + vm.serializeString(obj, "token_symbol", TOKEN_SYMBOL); + vm.serializeUint(obj, "token_decimals", uint256(TOKEN_DECIMALS)); + string memory json = vm.serializeAddress(obj, "destination_addresses", destinationAddresses); // Save to file string memory outputPath = "test-vectors/mmr_frontier_vectors.json"; diff --git a/crates/miden-agglayer/solidity-compat/test/SMTMerkleProofVectors.t.sol b/crates/miden-agglayer/solidity-compat/test/SMTMerkleProofVectors.t.sol index 5867414ec6..e6b466e521 100644 --- a/crates/miden-agglayer/solidity-compat/test/SMTMerkleProofVectors.t.sol +++ 
b/crates/miden-agglayer/solidity-compat/test/SMTMerkleProofVectors.t.sol @@ -2,41 +2,34 @@ pragma solidity ^0.8.20; import "forge-std/Test.sol"; -import "@agglayer/v2/lib/DepositContractBase.sol"; +import "./DepositContractTestHelpers.sol"; /** * @title SMTMerkleProofVectors * @notice Test contract that generates test vectors for Merkle proofs verification. - * + * * Run with: forge test -vv --match-contract SMTMerkleProofVectors - * + * * The output can be used during the bridge-in tests in * crates/miden-testing/tests/agglayer/bridge_in.rs */ -contract SMTMerkleProofVectors is Test, DepositContractBase { - +contract SMTMerkleProofVectors is Test, DepositContractTestHelpers { /** * @notice Generates vectors of leaves, roots and merkle paths and saves them to the JSON. - * Notice that each value in the leaves/roots array corresponds to 32 values in the + * Notice that each value in the leaves/roots array corresponds to 32 values in the * merkle paths array. */ function test_generateVerificationProofData() public { bytes32[] memory leaves = new bytes32[](32); bytes32[] memory roots = new bytes32[](32); bytes32[] memory merkle_paths = new bytes32[](1024); - bytes32[] memory canonical_zeros = new bytes32[](32); - // This array represent a merkle path during each iteration. - // This is a work around which allows to provide the merkle path to the verifyMerkleProof - // function, since the merkle_paths array cannot be sliced. + // This array represents a merkle path during each iteration. + // This is a workaround which allows to provide the merkle path to verifyMerkleProof + // since the merkle_paths array cannot be sliced. 
bytes32[32] memory current_path; - - // generate canonical zeros array - bytes32 z = bytes32(0); - for (uint256 i = 0; i < 32; i++) { - canonical_zeros[i] = z; - z = keccak256(abi.encodePacked(z, z)); - } + + bytes32[32] memory canonicalZeros = _computeCanonicalZeros(); // generate leaves, roots, and merkle_paths arrays for (uint256 i = 0; i < 32; i++) { @@ -48,18 +41,9 @@ contract SMTMerkleProofVectors is Test, DepositContractBase { // the overall number of leaves in the SMT instead of the index of the last leaf), so we // first update the merkle_paths array and only after that actually add a leaf and // recompute the _branch. - // - // Merkle paths in the _branch array contain plain zeros for the nodes which were not - // updated during the leaf insertion. To get the proper Merkle path we should use - // canonical zeros instead. + current_path = _generateLocalProof(i, canonicalZeros); for (uint256 j = 0; j < 32; j++) { - if (i >> j & 1 == 1) { - merkle_paths[i * 32 + j] = _branch[j]; - current_path[j] = _branch[j]; - } else { - merkle_paths[i * 32 + j] = canonical_zeros[j]; - current_path[j] = canonical_zeros[j]; - } + merkle_paths[i * 32 + j] = current_path[j]; } _addLeaf(leaf); diff --git a/crates/miden-agglayer/src/b2agg_note.rs b/crates/miden-agglayer/src/b2agg_note.rs index 255082032a..336fab0491 100644 --- a/crates/miden-agglayer/src/b2agg_note.rs +++ b/crates/miden-agglayer/src/b2agg_note.rs @@ -6,8 +6,9 @@ use alloc::string::ToString; use alloc::vec::Vec; -use miden_assembly::utils::Deserializable; -use miden_core::{Felt, Program, Word}; +use miden_assembly::serde::Deserializable; +use miden_core::program::Program; +use miden_core::{Felt, Word}; use miden_protocol::account::AccountId; use miden_protocol::crypto::rand::FeltRng; use miden_protocol::errors::NoteError; @@ -32,7 +33,7 @@ use crate::EthAddressFormat; // Initialize the B2AGG note script only once static B2AGG_SCRIPT: LazyLock = LazyLock::new(|| { let bytes = 
include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/B2AGG.masb")); - let program = Program::read_from_bytes(bytes).expect("Shipped B2AGG script is well-formed"); + let program = Program::read_from_bytes(bytes).expect("shipped B2AGG script is well-formed"); NoteScript::new(program) }); @@ -123,7 +124,8 @@ fn build_note_storage( ) -> Result { let mut elements = Vec::with_capacity(6); - elements.push(Felt::new(destination_network as u64)); + let destination_network = u32::from_le_bytes(destination_network.to_be_bytes()); + elements.push(Felt::from(destination_network)); elements.extend(destination_address.to_elements()); NoteStorage::new(elements) diff --git a/crates/miden-agglayer/src/bridge.rs b/crates/miden-agglayer/src/bridge.rs new file mode 100644 index 0000000000..c507fffa06 --- /dev/null +++ b/crates/miden-agglayer/src/bridge.rs @@ -0,0 +1,475 @@ +extern crate alloc; + +use alloc::vec; +use alloc::vec::Vec; + +use miden_core::{Felt, ONE, Word, ZERO}; +use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{ + Account, + AccountComponent, + AccountId, + AccountType, + StorageSlot, + StorageSlotName, +}; +use miden_protocol::block::account_tree::AccountIdKey; +use miden_protocol::crypto::hash::poseidon2::Poseidon2; +use miden_utils_sync::LazyLock; +use thiserror::Error; + +use super::agglayer_bridge_component_library; +use crate::claim_note::Keccak256Output; +pub use crate::{ + B2AggNote, + ClaimNoteStorage, + ConfigAggBridgeNote, + EthAddressFormat, + EthAmount, + EthAmountError, + ExitRoot, + GlobalIndex, + GlobalIndexError, + LeafData, + MetadataHash, + ProofData, + SmtNode, + UpdateGerNote, + create_claim_note, +}; + +// CONSTANTS +// ================================================================================================ +// Include the generated agglayer constants +include!(concat!(env!("OUT_DIR"), "/agglayer_constants.rs")); + +// AGGLAYER BRIDGE STRUCT +// 
================================================================================================ + +// bridge config +// ------------------------------------------------------------------------------------------------ + +static BRIDGE_ADMIN_ID_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::bridge::admin_account_id") + .expect("bridge admin account ID storage slot name should be valid") +}); +static GER_MANAGER_ID_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::bridge::ger_manager_account_id") + .expect("GER manager account ID storage slot name should be valid") +}); +static GER_MAP_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::bridge::ger_map") + .expect("GER map storage slot name should be valid") +}); +static FAUCET_REGISTRY_MAP_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::bridge::faucet_registry_map") + .expect("faucet registry map storage slot name should be valid") +}); +static TOKEN_REGISTRY_MAP_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::bridge::token_registry_map") + .expect("token registry map storage slot name should be valid") +}); + +// bridge in +// ------------------------------------------------------------------------------------------------ + +static CLAIM_NULLIFIERS_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::bridge::claim_nullifiers") + .expect("claim nullifiers storage slot name should be valid") +}); +static CGI_CHAIN_HASH_LO_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::bridge::cgi_chain_hash_lo") + .expect("CGI chain hash_lo storage slot name should be valid") +}); +static CGI_CHAIN_HASH_HI_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::bridge::cgi_chain_hash_hi") + .expect("CGI chain hash_hi storage slot name should be valid") +}); + +// bridge out +// 
------------------------------------------------------------------------------------------------ + +static LET_FRONTIER_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::bridge::let_frontier") + .expect("LET frontier storage slot name should be valid") +}); +static LET_ROOT_LO_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::bridge::let_root_lo") + .expect("LET root_lo storage slot name should be valid") +}); +static LET_ROOT_HI_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::bridge::let_root_hi") + .expect("LET root_hi storage slot name should be valid") +}); +static LET_NUM_LEAVES_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::bridge::let_num_leaves") + .expect("LET num_leaves storage slot name should be valid") +}); + +/// An [`AccountComponent`] implementing the AggLayer Bridge. +/// +/// It reexports the procedures from `agglayer::bridge`. When linking against this +/// component, the `agglayer` library must be available to the assembler. +/// The procedures of this component are: +/// - `register_faucet`, which registers a faucet in the bridge. +/// - `update_ger`, which injects a new GER into the storage map. +/// - `bridge_out`, which bridges an asset out of Miden to the destination network. +/// - `claim`, which validates a claim against the AggLayer bridge and creates a MINT note for the +/// AggFaucet. +/// +/// ## Storage Layout +/// +/// - [`Self::bridge_admin_id_slot_name`]: Stores the bridge admin account ID. +/// - [`Self::ger_manager_id_slot_name`]: Stores the GER manager account ID. +/// - [`Self::ger_map_slot_name`]: Stores the GERs. +/// - [`Self::faucet_registry_map_slot_name`]: Stores the faucet registry map. +/// - [`Self::token_registry_map_slot_name`]: Stores the token address → faucet ID map. +/// - [`Self::claim_nullifiers_slot_name`]: Stores the CLAIM note nullifiers map (RPO(leaf_index, +/// source_bridge_network) → \[1, 0, 0, 0\]). 
+/// - [`Self::cgi_chain_hash_lo_slot_name`]: Stores the lower 128 bits of the CGI chain hash. +/// - [`Self::cgi_chain_hash_hi_slot_name`]: Stores the upper 128 bits of the CGI chain hash. +/// - [`Self::let_frontier_slot_name`]: Stores the Local Exit Tree (LET) frontier. +/// - [`Self::let_root_lo_slot_name`]: Stores the lower 128 bits of the LET root. +/// - [`Self::let_root_hi_slot_name`]: Stores the upper 128 bits of the LET root. +/// - [`Self::let_num_leaves_slot_name`]: Stores the number of leaves in the LET frontier. +/// +/// The bridge starts with an empty faucet registry; faucets are registered at runtime via +/// CONFIG_AGG_BRIDGE notes. +#[derive(Debug, Clone)] +pub struct AggLayerBridge { + bridge_admin_id: AccountId, + ger_manager_id: AccountId, +} + +impl AggLayerBridge { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + const REGISTERED_GER_MAP_VALUE: Word = Word::new([ONE, ZERO, ZERO, ZERO]); + + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a new AggLayer bridge component with the standard configuration. + pub fn new(bridge_admin_id: AccountId, ger_manager_id: AccountId) -> Self { + Self { bridge_admin_id, ger_manager_id } + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + // --- bridge config ---- + + /// Storage slot name for the bridge admin account ID. + pub fn bridge_admin_id_slot_name() -> &'static StorageSlotName { + &BRIDGE_ADMIN_ID_SLOT_NAME + } + + /// Storage slot name for the GER manager account ID. + pub fn ger_manager_id_slot_name() -> &'static StorageSlotName { + &GER_MANAGER_ID_SLOT_NAME + } + + /// Storage slot name for the GERs map. + pub fn ger_map_slot_name() -> &'static StorageSlotName { + &GER_MAP_SLOT_NAME + } + + /// Storage slot name for the faucet registry map. 
+ pub fn faucet_registry_map_slot_name() -> &'static StorageSlotName { + &FAUCET_REGISTRY_MAP_SLOT_NAME + } + + /// Storage slot name for the token registry map. + pub fn token_registry_map_slot_name() -> &'static StorageSlotName { + &TOKEN_REGISTRY_MAP_SLOT_NAME + } + + // --- bridge in -------- + + /// Storage slot name for the CLAIM note nullifiers map. + pub fn claim_nullifiers_slot_name() -> &'static StorageSlotName { + &CLAIM_NULLIFIERS_SLOT_NAME + } + + /// Storage slot name for the lower 128 bits of the CGI chain hash. + pub fn cgi_chain_hash_lo_slot_name() -> &'static StorageSlotName { + &CGI_CHAIN_HASH_LO_SLOT_NAME + } + + /// Storage slot name for the upper 128 bits of the CGI chain hash. + pub fn cgi_chain_hash_hi_slot_name() -> &'static StorageSlotName { + &CGI_CHAIN_HASH_HI_SLOT_NAME + } + + // --- bridge out ------- + + /// Storage slot name for the Local Exit Tree (LET) frontier. + pub fn let_frontier_slot_name() -> &'static StorageSlotName { + &LET_FRONTIER_SLOT_NAME + } + + /// Storage slot name for the lower 128 bits of the LET root. + pub fn let_root_lo_slot_name() -> &'static StorageSlotName { + &LET_ROOT_LO_SLOT_NAME + } + + /// Storage slot name for the upper 128 bits of the LET root. + pub fn let_root_hi_slot_name() -> &'static StorageSlotName { + &LET_ROOT_HI_SLOT_NAME + } + + /// Storage slot name for the number of leaves in the LET frontier. + pub fn let_num_leaves_slot_name() -> &'static StorageSlotName { + &LET_NUM_LEAVES_SLOT_NAME + } + + /// Returns a boolean indicating whether the provided GER is present in storage of the provided + /// bridge account. + /// + /// # Errors + /// + /// Returns an error if: + /// - the provided account is not an [`AggLayerBridge`] account. 
+ pub fn is_ger_registered( + ger: ExitRoot, + bridge_account: Account, + ) -> Result { + // check that the provided account is a bridge account + Self::assert_bridge_account(&bridge_account)?; + + // Compute the expected GER hash: poseidon2::merge(GER_LOWER, GER_UPPER) + let ger_lower: Word = ger.to_elements()[0..4].try_into().unwrap(); + let ger_upper: Word = ger.to_elements()[4..8].try_into().unwrap(); + let ger_hash = Poseidon2::merge(&[ger_lower, ger_upper]); + + // Get the value stored by the GER hash. If this GER was registered, the value would be + // equal to [1, 0, 0, 0] + let stored_value = bridge_account + .storage() + .get_map_item(AggLayerBridge::ger_map_slot_name(), ger_hash) + .expect("provided account should have AggLayer Bridge specific storage slots"); + + if stored_value == Self::REGISTERED_GER_MAP_VALUE { + Ok(true) + } else { + Ok(false) + } + } + + /// Reads the Local Exit Root (double-word) from the bridge account's storage. + /// + /// The Local Exit Root is stored in two dedicated value slots: + /// - [`AggLayerBridge::let_root_lo_slot_name`] — low word of the root + /// - [`AggLayerBridge::let_root_hi_slot_name`] — high word of the root + /// + /// Returns the 256-bit root as 8 `Felt`s: first the 4 elements of `root_lo`, followed by the 4 + /// elements of `root_hi`. For an empty/uninitialized tree, all elements are zeros. + /// + /// # Errors + /// + /// Returns an error if: + /// - the provided account is not an [`AggLayerBridge`] account. 
+ pub fn read_local_exit_root(account: &Account) -> Result, AgglayerBridgeError> { + // check that the provided account is a bridge account + Self::assert_bridge_account(account)?; + + let root_lo_slot = AggLayerBridge::let_root_lo_slot_name(); + let root_hi_slot = AggLayerBridge::let_root_hi_slot_name(); + + let root_lo = account + .storage() + .get_item(root_lo_slot) + .expect("should be able to read LET root lo"); + let root_hi = account + .storage() + .get_item(root_hi_slot) + .expect("should be able to read LET root hi"); + + let mut root = Vec::with_capacity(8); + root.extend(root_lo.to_vec()); + root.extend(root_hi.to_vec()); + + Ok(root) + } + + /// Returns the number of leaves in the Local Exit Tree (LET) frontier. + pub fn read_let_num_leaves(account: &Account) -> u64 { + let num_leaves_slot = AggLayerBridge::let_num_leaves_slot_name(); + let value = account + .storage() + .get_item(num_leaves_slot) + .expect("should be able to read LET num leaves"); + value.to_vec()[0].as_canonical_u64() + } + + /// Returns the claimed global index (CGI) chain hash from the corresponding storage slot. + /// + /// # Errors + /// + /// Returns an error if: + /// - the provided account is not an [`AggLayerBridge`] account. 
+ pub fn cgi_chain_hash( + bridge_account: &Account, + ) -> Result { + // check that the provided account is a bridge account + Self::assert_bridge_account(bridge_account)?; + + let cgi_chain_hash_lo = bridge_account + .storage() + .get_item(AggLayerBridge::cgi_chain_hash_lo_slot_name()) + .expect("failed to get CGI hash chain lo slot"); + let cgi_chain_hash_hi = bridge_account + .storage() + .get_item(AggLayerBridge::cgi_chain_hash_hi_slot_name()) + .expect("failed to get CGI hash chain hi slot"); + + let cgi_chain_hash_bytes = cgi_chain_hash_lo + .iter() + .chain(cgi_chain_hash_hi.iter()) + .flat_map(|felt| { + (u32::try_from(felt.as_canonical_u64()).expect("Felt value does not fit into u32")) + .to_le_bytes() + }) + .collect::>(); + + Ok(Keccak256Output::new( + cgi_chain_hash_bytes + .try_into() + .expect("keccak hash should consist of exactly 32 bytes"), + )) + } + + // HELPER FUNCTIONS + // -------------------------------------------------------------------------------------------- + + /// Checks that the provided account is an [`AggLayerBridge`] account. + /// + /// # Errors + /// + /// Returns an error if: + /// - the provided account does not have all AggLayer Bridge specific storage slots. + /// - the code commitment of the provided account does not match the code commitment of the + /// [`AggLayerBridge`]. + fn assert_bridge_account(account: &Account) -> Result<(), AgglayerBridgeError> { + // check that the storage slots are as expected + Self::assert_storage_slots(account)?; + + // check that the code commitment matches the code commitment of the bridge account + Self::assert_code_commitment(account)?; + + Ok(()) + } + + /// Checks that the provided account has all storage slots required for the [`AggLayerBridge`]. + /// + /// # Errors + /// + /// Returns an error if: + /// - provided account does not have all AggLayer Bridge specific storage slots. 
+ fn assert_storage_slots(account: &Account) -> Result<(), AgglayerBridgeError> { + // get the storage slot names of the provided account + let account_storage_slot_names: Vec<&StorageSlotName> = account + .storage() + .slots() + .iter() + .map(|storage_slot| storage_slot.name()) + .collect::>(); + + // check that all bridge specific storage slots are presented in the provided account + let are_slots_present = Self::slot_names() + .iter() + .all(|slot_name| account_storage_slot_names.contains(slot_name)); + if !are_slots_present { + return Err(AgglayerBridgeError::StorageSlotsMismatch); + } + + Ok(()) + } + + /// Checks that the code commitment of the provided account matches the code commitment of the + /// [`AggLayerBridge`]. + /// + /// # Errors + /// + /// Returns an error if: + /// - the code commitment of the provided account does not match the code commitment of the + /// [`AggLayerBridge`]. + fn assert_code_commitment(account: &Account) -> Result<(), AgglayerBridgeError> { + if BRIDGE_CODE_COMMITMENT != account.code().commitment() { + return Err(AgglayerBridgeError::CodeCommitmentMismatch); + } + + Ok(()) + } + + /// Returns a vector of all [`AggLayerBridge`] storage slot names. 
+ fn slot_names() -> Vec<&'static StorageSlotName> { + vec![ + &*GER_MAP_SLOT_NAME, + &*LET_FRONTIER_SLOT_NAME, + &*LET_ROOT_LO_SLOT_NAME, + &*LET_ROOT_HI_SLOT_NAME, + &*LET_NUM_LEAVES_SLOT_NAME, + &*FAUCET_REGISTRY_MAP_SLOT_NAME, + &*TOKEN_REGISTRY_MAP_SLOT_NAME, + &*BRIDGE_ADMIN_ID_SLOT_NAME, + &*GER_MANAGER_ID_SLOT_NAME, + &*CGI_CHAIN_HASH_LO_SLOT_NAME, + &*CGI_CHAIN_HASH_HI_SLOT_NAME, + &*CLAIM_NULLIFIERS_SLOT_NAME, + ] + } +} + +impl From for AccountComponent { + fn from(bridge: AggLayerBridge) -> Self { + let bridge_admin_word = AccountIdKey::new(bridge.bridge_admin_id).as_word(); + let ger_manager_word = AccountIdKey::new(bridge.ger_manager_id).as_word(); + + let bridge_storage_slots = vec![ + StorageSlot::with_empty_map(GER_MAP_SLOT_NAME.clone()), + StorageSlot::with_empty_map(LET_FRONTIER_SLOT_NAME.clone()), + StorageSlot::with_value(LET_ROOT_LO_SLOT_NAME.clone(), Word::empty()), + StorageSlot::with_value(LET_ROOT_HI_SLOT_NAME.clone(), Word::empty()), + StorageSlot::with_value(LET_NUM_LEAVES_SLOT_NAME.clone(), Word::empty()), + StorageSlot::with_empty_map(FAUCET_REGISTRY_MAP_SLOT_NAME.clone()), + StorageSlot::with_empty_map(TOKEN_REGISTRY_MAP_SLOT_NAME.clone()), + StorageSlot::with_value(BRIDGE_ADMIN_ID_SLOT_NAME.clone(), bridge_admin_word), + StorageSlot::with_value(GER_MANAGER_ID_SLOT_NAME.clone(), ger_manager_word), + StorageSlot::with_value(CGI_CHAIN_HASH_LO_SLOT_NAME.clone(), Word::empty()), + StorageSlot::with_value(CGI_CHAIN_HASH_HI_SLOT_NAME.clone(), Word::empty()), + StorageSlot::with_empty_map(CLAIM_NULLIFIERS_SLOT_NAME.clone()), + ]; + bridge_component(bridge_storage_slots) + } +} + +// AGGLAYER BRIDGE ERROR +// ================================================================================================ + +/// AggLayer Bridge related errors. 
+#[derive(Debug, Error)] +pub enum AgglayerBridgeError { + #[error( + "provided account does not have storage slots required for the AggLayer Bridge account" + )] + StorageSlotsMismatch, + #[error( + "the code commitment of the provided account does not match the code commitment of the AggLayer Bridge account" + )] + CodeCommitmentMismatch, +} + +// HELPER FUNCTIONS +// ================================================================================================ + +/// Creates an AggLayer Bridge component with the specified storage slots. +fn bridge_component(storage_slots: Vec) -> AccountComponent { + let library = agglayer_bridge_component_library(); + let metadata = AccountComponentMetadata::new("agglayer::bridge", AccountType::all()) + .with_description("Bridge component for AggLayer"); + + AccountComponent::new(library, storage_slots, metadata) + .expect("bridge component should satisfy the requirements of a valid account component") +} diff --git a/crates/miden-agglayer/src/claim_note.rs b/crates/miden-agglayer/src/claim_note.rs index 0e919fd28f..1cd3d8dc53 100644 --- a/crates/miden-agglayer/src/claim_note.rs +++ b/crates/miden-agglayer/src/claim_note.rs @@ -2,34 +2,26 @@ use alloc::string::ToString; use alloc::vec; use alloc::vec::Vec; -use miden_core::{Felt, FieldElement, Word}; +use miden_core::utils::bytes_to_packed_u32_elements; +use miden_core::{Felt, Word}; use miden_protocol::account::AccountId; use miden_protocol::crypto::SequentialCommit; use miden_protocol::crypto::rand::FeltRng; use miden_protocol::errors::NoteError; -use miden_protocol::note::{ - Note, - NoteAssets, - NoteMetadata, - NoteRecipient, - NoteStorage, - NoteTag, - NoteType, -}; +use miden_protocol::note::{Note, NoteAssets, NoteMetadata, NoteRecipient, NoteStorage, NoteType}; use miden_standards::note::{NetworkAccountTarget, NoteExecutionHint}; -use crate::utils::bytes32_to_felts; -use crate::{EthAddressFormat, EthAmount, claim_script}; +use crate::{EthAddressFormat, EthAmount, 
GlobalIndex, MetadataHash, claim_script}; // CLAIM NOTE STRUCTURES // ================================================================================================ -/// SMT node representation (32-byte hash) +/// Keccak256 output representation (32-byte hash) #[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct SmtNode([u8; 32]); +pub struct Keccak256Output([u8; 32]); -impl SmtNode { - /// Creates a new SMT node from a 32-byte array +impl Keccak256Output { + /// Creates a new Keccak256 output from a 32-byte array pub fn new(bytes: [u8; 32]) -> Self { Self(bytes) } @@ -39,45 +31,37 @@ impl SmtNode { &self.0 } - /// Converts the SMT node to 8 Felt elements (32-byte value as 8 u32 values in big-endian) - pub fn to_elements(&self) -> [Felt; 8] { - bytes32_to_felts(&self.0) - } -} - -impl From<[u8; 32]> for SmtNode { - fn from(bytes: [u8; 32]) -> Self { - Self::new(bytes) - } -} - -/// Exit root representation (32-byte hash) -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct ExitRoot([u8; 32]); - -impl ExitRoot { - /// Creates a new exit root from a 32-byte array - pub fn new(bytes: [u8; 32]) -> Self { - Self(bytes) - } - - /// Returns the inner 32-byte array - pub fn as_bytes(&self) -> &[u8; 32] { - &self.0 + /// Converts the Keccak256 output to 8 Felt elements (32-byte value as 8 u32 values in + /// little-endian) + pub fn to_elements(&self) -> Vec { + bytes_to_packed_u32_elements(&self.0) } - /// Converts the exit root to 8 Felt elements - pub fn to_elements(&self) -> [Felt; 8] { - bytes32_to_felts(&self.0) + /// Converts the Keccak256 output to two [`Word`]s: `[lo, hi]`. + /// + /// - `lo` contains the first 4 u32-packed felts (bytes 0..16). + /// - `hi` contains the last 4 u32-packed felts (bytes 16..32). 
+ #[cfg(any(test, feature = "testing"))] + pub fn to_words(&self) -> [Word; 2] { + let elements = self.to_elements(); + let lo: [Felt; 4] = elements[0..4].try_into().expect("to_elements returns 8 felts"); + let hi: [Felt; 4] = elements[4..8].try_into().expect("to_elements returns 8 felts"); + [Word::new(lo), Word::new(hi)] } } -impl From<[u8; 32]> for ExitRoot { +impl From<[u8; 32]> for Keccak256Output { fn from(bytes: [u8; 32]) -> Self { Self::new(bytes) } } +/// SMT node representation (32-byte Keccak256 hash) +pub type SmtNode = Keccak256Output; + +/// Exit root representation (32-byte Keccak256 hash) +pub type ExitRoot = Keccak256Output; + /// Proof data for CLAIM note creation. /// Contains SMT proofs and root hashes using typed representations. #[derive(Clone)] @@ -86,8 +70,8 @@ pub struct ProofData { pub smt_proof_local_exit_root: [SmtNode; 32], /// SMT proof for rollup exit root (32 SMT nodes) pub smt_proof_rollup_exit_root: [SmtNode; 32], - /// Global index (uint256 as 8 u32 values) - pub global_index: [u32; 8], + /// Global index (uint256 as 32 bytes) + pub global_index: GlobalIndex, /// Mainnet exit root hash pub mainnet_exit_root: ExitRoot, /// Rollup exit root hash @@ -103,25 +87,19 @@ impl SequentialCommit for ProofData { // Convert SMT proof elements to felts (each node is 8 felts) for node in self.smt_proof_local_exit_root.iter() { - let node_felts = node.to_elements(); - elements.extend(node_felts); + elements.extend(node.to_elements()); } for node in self.smt_proof_rollup_exit_root.iter() { - let node_felts = node.to_elements(); - elements.extend(node_felts); + elements.extend(node.to_elements()); } - // Global index (uint256 as 8 u32 felts) - elements.extend(self.global_index.iter().map(|&v| Felt::new(v as u64))); - - // Mainnet exit root (bytes32 as 8 u32 felts) - let mainnet_exit_root_felts = self.mainnet_exit_root.to_elements(); - elements.extend(mainnet_exit_root_felts); + // Global index (uint256 as 32 bytes) + 
elements.extend(self.global_index.to_elements()); - // Rollup exit root (bytes32 as 8 u32 felts) - let rollup_exit_root_felts = self.rollup_exit_root.to_elements(); - elements.extend(rollup_exit_root_felts); + // Mainnet and rollup exit roots + elements.extend(self.mainnet_exit_root.to_elements()); + elements.extend(self.rollup_exit_root.to_elements()); elements } @@ -141,15 +119,15 @@ pub struct LeafData { pub destination_address: EthAddressFormat, /// Amount of tokens (uint256) pub amount: EthAmount, - /// ABI encoded metadata (fixed size of 8 u32 values) - pub metadata: [u32; 8], + /// Metadata hash (32 bytes) + pub metadata_hash: MetadataHash, } impl SequentialCommit for LeafData { type Commitment = Word; fn to_elements(&self) -> Vec { - const LEAF_DATA_ELEMENT_COUNT: usize = 32; // 1 + 3 + 1 + 5 + 1 + 5 + 8 + 8 (leafType + padding + networks + addresses + amount + metadata) + const LEAF_DATA_ELEMENT_COUNT: usize = 32; // 1 + 1 + 5 + 1 + 5 + 8 + 8 + 3 (leafType + networks + addresses + amount + metadata + padding) let mut elements = Vec::with_capacity(LEAF_DATA_ELEMENT_COUNT); // LeafType (uint32 as Felt): 0u32 for transfer Ether / ERC20 tokens, 1u32 for message @@ -157,17 +135,16 @@ impl SequentialCommit for LeafData { // for a `CLAIM` note, leafType is always 0 (transfer Ether / ERC20 tokens) elements.push(Felt::ZERO); - // Padding - elements.extend(vec![Felt::ZERO; 3]); - - // Origin network - elements.push(Felt::new(self.origin_network as u64)); + // Origin network (encode as little-endian bytes for keccak) + let origin_network = u32::from_le_bytes(self.origin_network.to_be_bytes()); + elements.push(Felt::from(origin_network)); // Origin token address (5 u32 felts) elements.extend(self.origin_token_address.to_elements()); - // Destination network - elements.push(Felt::new(self.destination_network as u64)); + // Destination network (encode as little-endian bytes for keccak) + let destination_network = 
u32::from_le_bytes(self.destination_network.to_be_bytes()); + elements.push(Felt::from(destination_network)); // Destination address (5 u32 felts) elements.extend(self.destination_address.to_elements()); @@ -175,44 +152,11 @@ impl SequentialCommit for LeafData { // Amount (uint256 as 8 u32 felts) elements.extend(self.amount.to_elements()); - // Metadata (8 u32 felts) - elements.extend(self.metadata.iter().map(|&v| Felt::new(v as u64))); - - elements - } -} - -/// Output note data for CLAIM note creation. -/// Contains note-specific data and can use Miden types. -/// TODO: Remove all but target_faucet_account_id -#[derive(Clone)] -pub struct OutputNoteData { - /// P2ID note serial number (4 felts as Word) - pub output_p2id_serial_num: Word, - /// Target agg faucet account ID (2 felts: prefix and suffix) - pub target_faucet_account_id: AccountId, - /// P2ID output note tag - pub output_note_tag: NoteTag, -} - -impl OutputNoteData { - /// Converts the output note data to a vector of field elements for note storage - pub fn to_elements(&self) -> Vec { - const OUTPUT_NOTE_DATA_ELEMENT_COUNT: usize = 8; // 4 + 2 + 1 + 1 (serial_num + account_id + tag + padding) - let mut elements = Vec::with_capacity(OUTPUT_NOTE_DATA_ELEMENT_COUNT); - - // P2ID note serial number (4 felts as Word) - elements.extend(self.output_p2id_serial_num); - - // Target faucet account ID (2 felts: prefix and suffix) - elements.push(self.target_faucet_account_id.prefix().as_felt()); - elements.push(self.target_faucet_account_id.suffix()); - - // Output note tag - elements.push(Felt::new(self.output_note_tag.as_u32() as u64)); + // Metadata hash (8 u32 felts) + elements.extend(self.metadata_hash.to_elements()); // Padding - elements.extend(vec![Felt::ZERO; 1]); + elements.extend(vec![Felt::ZERO; 3]); elements } @@ -228,21 +172,21 @@ pub struct ClaimNoteStorage { pub proof_data: ProofData, /// Leaf data containing network, address, amount, and metadata pub leaf_data: LeafData, - /// Output note data 
containing note-specific information - pub output_note_data: OutputNoteData, + /// Miden claim amount (scaled-down token amount as Felt) + pub miden_claim_amount: Felt, } impl TryFrom for NoteStorage { type Error = NoteError; fn try_from(storage: ClaimNoteStorage) -> Result { - // proof_data + leaf_data + empty_word + output_note_data - // 536 + 32 + 8 - let mut claim_storage = Vec::with_capacity(576); + // proof_data + leaf_data + miden_claim_amount + // 536 + 32 + 1 + let mut claim_storage = Vec::with_capacity(569); claim_storage.extend(storage.proof_data.to_elements()); claim_storage.extend(storage.leaf_data.to_elements()); - claim_storage.extend(storage.output_note_data.to_elements()); + claim_storage.push(storage.miden_claim_amount); NoteStorage::new(claim_storage) } @@ -251,10 +195,13 @@ impl TryFrom for NoteStorage { // CLAIM NOTE CREATION // ================================================================================================ -/// Generates a CLAIM note - a note that instructs an agglayer faucet to validate and mint assets. +/// Generates a CLAIM note - a note that instructs the bridge to validate a claim and create +/// a MINT note for the aggfaucet. /// /// # Parameters /// - `storage`: The core storage for creating the CLAIM note +/// - `target_bridge_id`: The account ID of the bridge that should consume this note. Encoded as a +/// `NetworkAccountTarget` attachment on the note metadata. /// - `sender_account_id`: The account ID of the CLAIM note creator /// - `rng`: Random number generator for creating the CLAIM note serial number /// @@ -262,17 +209,15 @@ impl TryFrom for NoteStorage { /// Returns an error if note creation fails. 
pub fn create_claim_note( storage: ClaimNoteStorage, + target_bridge_id: AccountId, sender_account_id: AccountId, rng: &mut R, ) -> Result { let note_storage = NoteStorage::try_from(storage.clone())?; - let attachment = NetworkAccountTarget::new( - storage.output_note_data.target_faucet_account_id, - NoteExecutionHint::Always, - ) - .map_err(|e| NoteError::other(e.to_string()))? - .into(); + let attachment = NetworkAccountTarget::new(target_bridge_id, NoteExecutionHint::Always) + .map_err(|e| NoteError::other(e.to_string()))? + .into(); let metadata = NoteMetadata::new(sender_account_id, NoteType::Public).with_attachment(attachment); diff --git a/crates/miden-agglayer/src/config_note.rs b/crates/miden-agglayer/src/config_note.rs new file mode 100644 index 0000000000..c5b2105268 --- /dev/null +++ b/crates/miden-agglayer/src/config_note.rs @@ -0,0 +1,127 @@ +//! CONFIG_AGG_BRIDGE note creation utilities. +//! +//! This module provides helpers for creating CONFIG_AGG_BRIDGE notes, +//! which are used to register faucets in the bridge's faucet registry. 
+ +extern crate alloc; + +use alloc::string::ToString; +use alloc::vec; +use alloc::vec::Vec; + +use miden_assembly::serde::Deserializable; +use miden_core::{Felt, Word}; +use miden_protocol::account::AccountId; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::errors::NoteError; +use miden_protocol::note::{ + Note, + NoteAssets, + NoteAttachment, + NoteMetadata, + NoteRecipient, + NoteScript, + NoteStorage, + NoteType, +}; +use miden_protocol::vm::Program; +use miden_standards::note::{NetworkAccountTarget, NoteExecutionHint}; +use miden_utils_sync::LazyLock; + +use crate::EthAddressFormat; + +// NOTE SCRIPT +// ================================================================================================ + +// Initialize the CONFIG_AGG_BRIDGE note script only once +static CONFIG_AGG_BRIDGE_SCRIPT: LazyLock = LazyLock::new(|| { + let bytes = + include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/CONFIG_AGG_BRIDGE.masb")); + let program = + Program::read_from_bytes(bytes).expect("shipped CONFIG_AGG_BRIDGE script is well-formed"); + NoteScript::new(program) +}); + +// CONFIG_AGG_BRIDGE NOTE +// ================================================================================================ + +/// CONFIG_AGG_BRIDGE note. +/// +/// This note is used to register a faucet in the bridge's faucet and token registries. +/// It carries the origin token address and faucet account ID, and is always public. +pub struct ConfigAggBridgeNote; + +impl ConfigAggBridgeNote { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// Expected number of storage items for a CONFIG_AGG_BRIDGE note. + /// Layout: [origin_token_addr(5), faucet_id_suffix, faucet_id_prefix] + pub const NUM_STORAGE_ITEMS: usize = 7; + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the CONFIG_AGG_BRIDGE note script. 
+ pub fn script() -> NoteScript { + CONFIG_AGG_BRIDGE_SCRIPT.clone() + } + + /// Returns the CONFIG_AGG_BRIDGE note script root. + pub fn script_root() -> Word { + CONFIG_AGG_BRIDGE_SCRIPT.root() + } + + // BUILDERS + // -------------------------------------------------------------------------------------------- + + /// Creates a CONFIG_AGG_BRIDGE note to register a faucet in the bridge's registry. + /// + /// The note storage contains 7 felts: + /// - `origin_token_addr[0..5]`: The 5 u32 felts of the origin EVM token address + /// - `faucet_id_suffix`: The suffix of the faucet account ID + /// - `faucet_id_prefix`: The prefix of the faucet account ID + /// + /// # Parameters + /// - `faucet_account_id`: The account ID of the faucet to register + /// - `origin_token_address`: The origin EVM token address for the token registry + /// - `sender_account_id`: The account ID of the note creator + /// - `target_account_id`: The bridge account ID that will consume this note + /// - `rng`: Random number generator for creating the note serial number + /// + /// # Errors + /// Returns an error if note creation fails. 
+ pub fn create( + faucet_account_id: AccountId, + origin_token_address: &EthAddressFormat, + sender_account_id: AccountId, + target_account_id: AccountId, + rng: &mut R, + ) -> Result { + // Create note storage with 7 felts: [origin_token_addr(5), faucet_id_suffix, + // faucet_id_prefix] + let addr_elements = origin_token_address.to_elements(); + let mut storage_values: Vec = addr_elements; + storage_values.push(faucet_account_id.suffix()); + storage_values.push(faucet_account_id.prefix().as_felt()); + + let note_storage = NoteStorage::new(storage_values)?; + + // Generate a serial number for the note + let serial_num = rng.draw_word(); + + let recipient = NoteRecipient::new(serial_num, Self::script(), note_storage); + + let attachment = NoteAttachment::from( + NetworkAccountTarget::new(target_account_id, NoteExecutionHint::Always) + .map_err(|e| NoteError::other(e.to_string()))?, + ); + let metadata = + NoteMetadata::new(sender_account_id, NoteType::Public).with_attachment(attachment); + + // CONFIG_AGG_BRIDGE notes don't carry assets + let assets = NoteAssets::new(vec![])?; + + Ok(Note::new(assets, metadata, recipient)) + } +} diff --git a/crates/miden-agglayer/src/errors/agglayer.rs b/crates/miden-agglayer/src/errors/agglayer.rs index a1874001d9..6ccc7d6f57 100644 --- a/crates/miden-agglayer/src/errors/agglayer.rs +++ b/crates/miden-agglayer/src/errors/agglayer.rs @@ -9,9 +9,6 @@ use miden_protocol::errors::MasmError; // AGGLAYER ERRORS // ================================================================================================ -/// Error Message: "most-significant 4 bytes (addr4) must be zero" -pub const ERR_ADDR4_NONZERO: MasmError = MasmError::from_static_str("most-significant 4 bytes (addr4) must be zero"); - /// Error Message: "B2AGG note attachment target account does not match consuming account" pub const ERR_B2AGG_TARGET_ACCOUNT_MISMATCH: MasmError = MasmError::from_static_str("B2AGG note attachment target account does not match consuming 
account"); /// Error Message: "B2AGG script expects exactly 6 note storage items" @@ -19,37 +16,78 @@ pub const ERR_B2AGG_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS: MasmError = MasmError::f /// Error Message: "B2AGG script requires exactly 1 note asset" pub const ERR_B2AGG_WRONG_NUMBER_OF_ASSETS: MasmError = MasmError::from_static_str("B2AGG script requires exactly 1 note asset"); -/// Error Message: "bridge not mainnet" -pub const ERR_BRIDGE_NOT_MAINNET: MasmError = MasmError::from_static_str("bridge not mainnet"); +/// Error Message: "mainnet flag must be 1 for a mainnet deposit" +pub const ERR_BRIDGE_NOT_MAINNET: MasmError = MasmError::from_static_str("mainnet flag must be 1 for a mainnet deposit"); +/// Error Message: "mainnet flag must be 0 for a rollup deposit" +pub const ERR_BRIDGE_NOT_ROLLUP: MasmError = MasmError::from_static_str("mainnet flag must be 0 for a rollup deposit"); + +/// Error Message: "claim note has already been spent" +pub const ERR_CLAIM_ALREADY_SPENT: MasmError = MasmError::from_static_str("claim note has already been spent"); +/// Error Message: "CLAIM note attachment target account does not match consuming account" +pub const ERR_CLAIM_TARGET_ACCT_MISMATCH: MasmError = MasmError::from_static_str("CLAIM note attachment target account does not match consuming account"); + +/// Error Message: "CONFIG_AGG_BRIDGE note attachment target account does not match consuming account" +pub const ERR_CONFIG_AGG_BRIDGE_TARGET_ACCOUNT_MISMATCH: MasmError = MasmError::from_static_str("CONFIG_AGG_BRIDGE note attachment target account does not match consuming account"); +/// Error Message: "CONFIG_AGG_BRIDGE expects exactly 7 note storage items" +pub const ERR_CONFIG_AGG_BRIDGE_UNEXPECTED_STORAGE_ITEMS: MasmError = MasmError::from_static_str("CONFIG_AGG_BRIDGE expects exactly 7 note storage items"); -/// Error Message: "CLAIM's target account address and transaction address do not match" -pub const ERR_CLAIM_TARGET_ACCT_MISMATCH: MasmError = 
MasmError::from_static_str("CLAIM's target account address and transaction address do not match"); +/// Error Message: "faucet is not registered in the bridge's faucet registry" +pub const ERR_FAUCET_NOT_REGISTERED: MasmError = MasmError::from_static_str("faucet is not registered in the bridge's faucet registry"); /// Error Message: "combined u64 doesn't fit in field" pub const ERR_FELT_OUT_OF_FIELD: MasmError = MasmError::from_static_str("combined u64 doesn't fit in field"); -/// Error Message: "invalid claim proof" -pub const ERR_INVALID_CLAIM_PROOF: MasmError = MasmError::from_static_str("invalid claim proof"); +/// Error Message: "GER not found in storage" +pub const ERR_GER_NOT_FOUND: MasmError = MasmError::from_static_str("GER not found in storage"); /// Error Message: "leading bits of global index must be zero" pub const ERR_LEADING_BITS_NON_ZERO: MasmError = MasmError::from_static_str("leading bits of global index must be zero"); +/// Error Message: "mainnet flag must be 0 or 1" +pub const ERR_MAINNET_FLAG_INVALID: MasmError = MasmError::from_static_str("mainnet flag must be 0 or 1"); + /// Error Message: "number of leaves in the MMR of the MMR Frontier would exceed 4294967295 (2^32 - 1)" pub const ERR_MMR_FRONTIER_LEAVES_NUM_EXCEED_LIMIT: MasmError = MasmError::from_static_str("number of leaves in the MMR of the MMR Frontier would exceed 4294967295 (2^32 - 1)"); +/// Error Message: "most-significant 4 bytes must be zero for AccountId" +pub const ERR_MSB_NONZERO: MasmError = MasmError::from_static_str("most-significant 4 bytes must be zero for AccountId"); + /// Error Message: "address limb is not u32" pub const ERR_NOT_U32: MasmError = MasmError::from_static_str("address limb is not u32"); +/// Error Message: "remainder z must be < 10^s" +pub const ERR_REMAINDER_TOO_LARGE: MasmError = MasmError::from_static_str("remainder z must be < 10^s"); + /// Error Message: "rollup index must be zero for a mainnet deposit" pub const ERR_ROLLUP_INDEX_NON_ZERO: 
MasmError = MasmError::from_static_str("rollup index must be zero for a mainnet deposit"); /// Error Message: "maximum scaling factor is 18" pub const ERR_SCALE_AMOUNT_EXCEEDED_LIMIT: MasmError = MasmError::from_static_str("maximum scaling factor is 18"); +/// Error Message: "note sender is not the bridge admin" +pub const ERR_SENDER_NOT_BRIDGE_ADMIN: MasmError = MasmError::from_static_str("note sender is not the bridge admin"); +/// Error Message: "note sender is not the global exit root manager" +pub const ERR_SENDER_NOT_GER_MANAGER: MasmError = MasmError::from_static_str("note sender is not the global exit root manager"); + /// Error Message: "merkle proof verification failed: provided SMT root does not match the computed root" pub const ERR_SMT_ROOT_VERIFICATION_FAILED: MasmError = MasmError::from_static_str("merkle proof verification failed: provided SMT root does not match the computed root"); +/// Error Message: "source bridge network overflowed u32" +pub const ERR_SOURCE_BRIDGE_NETWORK_OVERFLOW: MasmError = MasmError::from_static_str("source bridge network overflowed u32"); + +/// Error Message: "token address is not registered in the bridge's token registry" +pub const ERR_TOKEN_NOT_REGISTERED: MasmError = MasmError::from_static_str("token address is not registered in the bridge's token registry"); + +/// Error Message: "x < y*10^s (underflow detected)" +pub const ERR_UNDERFLOW: MasmError = MasmError::from_static_str("x < y*10^s (underflow detected)"); + /// Error Message: "UPDATE_GER note attachment target account does not match consuming account" pub const ERR_UPDATE_GER_TARGET_ACCOUNT_MISMATCH: MasmError = MasmError::from_static_str("UPDATE_GER note attachment target account does not match consuming account"); /// Error Message: "UPDATE_GER script expects exactly 8 note storage items" pub const ERR_UPDATE_GER_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS: MasmError = MasmError::from_static_str("UPDATE_GER script expects exactly 8 note storage items"); + +/// Error 
Message: "the agglayer bridge in u256 value is larger than 2**128 and cannot be verifiably scaled to u64" +pub const ERR_X_TOO_LARGE: MasmError = MasmError::from_static_str("the agglayer bridge in u256 value is larger than 2**128 and cannot be verifiably scaled to u64"); + +/// Error Message: "y exceeds max fungible token amount" +pub const ERR_Y_TOO_LARGE: MasmError = MasmError::from_static_str("y exceeds max fungible token amount"); diff --git a/crates/miden-agglayer/src/eth_types/address.rs b/crates/miden-agglayer/src/eth_types/address.rs index f2a94ed6df..8b489badf8 100644 --- a/crates/miden-agglayer/src/eth_types/address.rs +++ b/crates/miden-agglayer/src/eth_types/address.rs @@ -1,8 +1,9 @@ use alloc::format; use alloc::string::{String, ToString}; +use alloc::vec::Vec; use core::fmt; -use miden_core::FieldElement; +use miden_core::utils::bytes_to_packed_u32_elements; use miden_protocol::Felt; use miden_protocol::account::AccountId; use miden_protocol::utils::{HexParseError, bytes_to_hex_string, hex_to_bytes}; @@ -17,15 +18,16 @@ use miden_protocol::utils::{HexParseError, bytes_to_hex_string, hex_to_bytes}; /// /// - Raw bytes: `[u8; 20]` in the conventional Ethereum big-endian byte order (`bytes[0]` is the /// most-significant byte). -/// - MASM "address\[5\]" limbs: 5 x u32 limbs in *little-endian limb order*: -/// - addr0 = bytes[16..19] (least-significant 4 bytes) -/// - addr1 = bytes[12..15] -/// - addr2 = bytes[ 8..11] -/// - addr3 = bytes[ 4.. 7] -/// - addr4 = bytes[ 0.. 
3] (most-significant 4 bytes) +/// - MASM "address\[5\]" limbs: 5 x u32 limbs in *big-endian limb order* (each limb encodes its 4 +/// bytes in little-endian order so felts map to keccak bytes directly): +/// - `address[0]` = bytes[0..4] (most-significant 4 bytes, zero for embedded AccountId) +/// - `address[1]` = bytes[4..8] +/// - `address[2]` = bytes[8..12] +/// - `address[3]` = bytes[12..16] +/// - `address[4]` = bytes[16..20] (least-significant 4 bytes) /// - Embedded AccountId format: `0x00000000 || prefix(8) || suffix(8)`, where: -/// - prefix = (addr3 << 32) | addr2 = bytes[4..11] as a big-endian u64 -/// - suffix = (addr1 << 32) | addr0 = bytes[12..19] as a big-endian u64 +/// - prefix = bytes[4..12] as a big-endian u64 +/// - suffix = bytes[12..20] as a big-endian u64 /// /// Note: prefix/suffix are *conceptual* 64-bit words; when converting to [`Felt`], we must ensure /// `Felt::new(u64)` does not reduce mod p (checked explicitly in `to_account_id`). @@ -80,8 +82,8 @@ impl EthAddressFormat { let felts: [Felt; 2] = account_id.into(); let mut out = [0u8; 20]; - out[4..12].copy_from_slice(&felts[0].as_int().to_be_bytes()); - out[12..20].copy_from_slice(&felts[1].as_int().to_be_bytes()); + out[4..12].copy_from_slice(&felts[0].as_canonical_u64().to_be_bytes()); + out[12..20].copy_from_slice(&felts[1].as_canonical_u64().to_be_bytes()); Self(out) } @@ -104,31 +106,22 @@ impl EthAddressFormat { // INTERNAL API - For CLAIM note processing // -------------------------------------------------------------------------------------------- - /// Converts the Ethereum address format into an array of 5 [`Felt`] values for MASM processing. + /// Converts the Ethereum address format into an array of 5 [`Felt`] values for Miden VM. 
 ///
 /// **Internal API**: This function is used internally during CLAIM note processing to convert
-/// the address format into the MASM `address[5]` representation expected by the
+/// the address into the MASM `address[5]` representation expected by the
 /// `to_account_id` procedure.
 ///
-    /// The returned order matches the MASM `address\[5\]` convention (*little-endian limb order*):
-    /// - addr0 = bytes[16..19] (least-significant 4 bytes)
-    /// - addr1 = bytes[12..15]
-    /// - addr2 = bytes[ 8..11]
-    /// - addr3 = bytes[ 4.. 7]
-    /// - addr4 = bytes[ 0.. 3] (most-significant 4 bytes)
+    /// The returned order matches the Solidity ABI encoding convention (*big-endian limb order*):
+    /// - `address[0]` = bytes[0..4] (most-significant 4 bytes, zero for embedded AccountId)
+    /// - `address[1]` = bytes[4..8]
+    /// - `address[2]` = bytes[8..12]
+    /// - `address[3]` = bytes[12..16]
+    /// - `address[4]` = bytes[16..20] (least-significant 4 bytes)
     ///
-    /// Each limb is interpreted as a big-endian `u32` and stored in a [`Felt`].
-    pub fn to_elements(&self) -> [Felt; 5] {
-        let mut result = [Felt::ZERO; 5];
-
-        // i=0 -> bytes[16..20], i=4 -> bytes[0..4]
-        for (felt, chunk) in result.iter_mut().zip(self.0.chunks(4).skip(1).rev()) {
-            let value = u32::from_be_bytes([chunk[0], chunk[1], chunk[2], chunk[3]]);
-            // u32 values always fit in Felt, so this conversion is safe
-            *felt = Felt::try_from(value as u64).expect("u32 value should always fit in Felt");
-        }
-
-        result
+    /// Each limb is interpreted as a little-endian `u32` and stored in a [`Felt`].
+    pub fn to_elements(&self) -> Vec<Felt> {
+        bytes_to_packed_u32_elements(&self.0)
     }
 
     /// Converts the Ethereum address format back to an [`AccountId`].
@@ -153,7 +146,7 @@ impl EthAddressFormat { let suffix_felt = Felt::try_from(suffix).map_err(|_| AddressConversionError::FeltOutOfField)?; - AccountId::try_from([prefix_felt, suffix_felt]) + AccountId::try_from_elements(suffix_felt, prefix_felt) .map_err(|_| AddressConversionError::InvalidAccountId) } @@ -162,7 +155,7 @@ impl EthAddressFormat { /// Convert `[u8; 20]` -> `(prefix, suffix)` by extracting the last 16 bytes. /// Requires the first 4 bytes be zero. - /// Returns prefix and suffix values that match the MASM little-endian limb implementation: + /// Returns prefix and suffix values that match the MASM little-endian limb byte encoding: /// - prefix = bytes[4..12] as big-endian u64 = (addr3 << 32) | addr2 /// - suffix = bytes[12..20] as big-endian u64 = (addr1 << 32) | addr0 fn bytes20_to_prefix_suffix(bytes: [u8; 20]) -> Result<(u64, u64), AddressConversionError> { diff --git a/crates/miden-agglayer/src/eth_types/amount.rs b/crates/miden-agglayer/src/eth_types/amount.rs index dc0b9948cd..9fda836856 100644 --- a/crates/miden-agglayer/src/eth_types/amount.rs +++ b/crates/miden-agglayer/src/eth_types/amount.rs @@ -1,27 +1,30 @@ -use core::fmt; +use alloc::vec::Vec; -use miden_core::FieldElement; +use miden_core::utils::bytes_to_packed_u32_elements; use miden_protocol::Felt; +use miden_protocol::asset::FungibleAsset; +use primitive_types::U256; +use thiserror::Error; // ================================================================================================ // ETHEREUM AMOUNT ERROR // ================================================================================================ /// Error type for Ethereum amount conversions. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Error)] pub enum EthAmountError { /// The amount doesn't fit in the target type. 
+ #[error("amount overflow: value doesn't fit in target type")] Overflow, -} - -impl fmt::Display for EthAmountError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - EthAmountError::Overflow => { - write!(f, "amount overflow: value doesn't fit in target type") - }, - } - } + /// The scaling factor is too large (> 18). + #[error("scaling factor too large: maximum is 18")] + ScaleTooLarge, + /// The scaled-down value doesn't fit in a u64. + #[error("scaled value doesn't fit in u64")] + ScaledValueDoesNotFitU64, + /// The scaled-down value exceeds the maximum fungible token amount. + #[error("scaled value exceeds the maximum fungible token amount")] + ScaledValueExceedsMaxFungibleAmount, } // ================================================================================================ @@ -33,119 +36,111 @@ impl fmt::Display for EthAmountError { /// This type provides a more typed representation of Ethereum amounts compared to raw `[u32; 8]` /// arrays, while maintaining compatibility with the existing MASM processing pipeline. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct EthAmount([u32; 8]); +pub struct EthAmount([u8; 32]); impl EthAmount { - /// Creates a new [`EthAmount`] from an array of 8 u32 values. - /// - /// The values are stored in little-endian order where `values[0]` contains - /// the least significant 32 bits. - pub const fn new(values: [u32; 8]) -> Self { - Self(values) + /// Creates an [`EthAmount`] from a 32-byte array. + pub fn new(bytes: [u8; 32]) -> Self { + Self(bytes) } - /// Creates an [`EthAmount`] from a single u64 value. + /// Creates an [`EthAmount`] from a decimal (uint) string. /// - /// This is useful for smaller amounts that fit in a u64. The value is - /// stored in the first two u32 slots with the remaining slots set to zero. 
- pub const fn from_u64(value: u64) -> Self { - let low = value as u32; - let high = (value >> 32) as u32; - Self([low, high, 0, 0, 0, 0, 0, 0]) - } - - /// Creates an [`EthAmount`] from a single u32 value. + /// The string should contain only ASCII decimal digits (e.g. `"2000000000000000000"`). + /// The value is stored as a 32-byte big-endian array, matching the Solidity uint256 layout. /// - /// This is useful for smaller amounts that fit in a u32. The value is - /// stored in the first u32 slot with the remaining slots set to zero. - pub const fn from_u32(value: u32) -> Self { - Self([value, 0, 0, 0, 0, 0, 0, 0]) - } - - /// Returns the raw array of 8 u32 values. - pub const fn as_array(&self) -> &[u32; 8] { - &self.0 - } - - /// Converts the amount into an array of 8 u32 values. - pub const fn into_array(self) -> [u32; 8] { - self.0 + /// # Errors + /// + /// Returns [`EthAmountError`] if the string is empty, contains non-digit characters, + /// or represents a value that overflows uint256. + pub fn from_uint_str(s: &str) -> Result { + let value = U256::from_dec_str(s).map_err(|_| EthAmountError::Overflow)?; + Ok(Self(value.to_big_endian())) } - /// Returns true if the amount is zero. - pub fn is_zero(&self) -> bool { - self.0.iter().all(|&x| x == 0) + /// Converts the EthAmount to a U256 for easier arithmetic operations. + pub fn to_u256(&self) -> U256 { + U256::from_big_endian(&self.0) } - /// Attempts to convert the amount to a u64. + /// Creates an EthAmount from a U256 value. /// - /// # Errors - /// Returns [`EthAmountError::Overflow`] if the amount doesn't fit in a u64 - /// (i.e., if any of the upper 6 u32 values are non-zero). - pub fn try_to_u64(&self) -> Result { - if self.0[2..].iter().any(|&x| x != 0) { - Err(EthAmountError::Overflow) - } else { - Ok((self.0[1] as u64) << 32 | self.0[0] as u64) - } - } - - /// Attempts to convert the amount to a u32. 
- /// - /// # Errors - /// Returns [`EthAmountError::Overflow`] if the amount doesn't fit in a u32 - /// (i.e., if any of the upper 7 u32 values are non-zero). - pub fn try_to_u32(&self) -> Result { - if self.0[1..].iter().any(|&x| x != 0) { - Err(EthAmountError::Overflow) - } else { - Ok(self.0[0]) - } + /// This constructor is only available in test code to make test arithmetic easier. + #[cfg(any(test, feature = "testing"))] + pub fn from_u256(value: U256) -> Self { + Self(value.to_big_endian()) } /// Converts the amount to a vector of field elements for note storage. /// /// Each u32 value in the amount array is converted to a [`Felt`]. - pub fn to_elements(&self) -> [Felt; 8] { - let mut result = [Felt::ZERO; 8]; - for (i, &value) in self.0.iter().enumerate() { - result[i] = Felt::from(value); - } - result + pub fn to_elements(&self) -> Vec { + bytes_to_packed_u32_elements(&self.0) } -} -impl From<[u32; 8]> for EthAmount { - fn from(values: [u32; 8]) -> Self { - Self(values) + /// Returns the raw 32-byte array. + pub const fn as_bytes(&self) -> &[u8; 32] { + &self.0 } } -impl From for [u32; 8] { - fn from(amount: EthAmount) -> Self { - amount.0 - } -} +// ================================================================================================ +// U256 SCALING DOWN HELPERS +// ================================================================================================ -impl From for EthAmount { - fn from(value: u64) -> Self { - Self::from_u64(value) - } -} +/// Maximum scaling factor for decimal conversions +const MAX_SCALING_FACTOR: u32 = 18; -impl From for EthAmount { - fn from(value: u32) -> Self { - Self::from_u32(value) +/// Calculate 10^scale where scale is a u32 exponent. +/// +/// # Errors +/// Returns [`EthAmountError::ScaleTooLarge`] if scale > 18. 
+fn pow10_u64(scale: u32) -> Result<u64, EthAmountError> {
+    if scale > MAX_SCALING_FACTOR {
+        return Err(EthAmountError::ScaleTooLarge);
     }
+    Ok(10_u64.pow(scale))
 }
 
-impl fmt::Display for EthAmount {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        // For display purposes, show as a hex string of the full 256-bit value
-        write!(f, "0x")?;
-        for &value in self.0.iter().rev() {
-            write!(f, "{:08x}", value)?;
+impl EthAmount {
+    /// Converts a U256 amount to a Miden Felt by scaling down by 10^scale_exp.
+    ///
+    /// This is the deterministic reference implementation that computes:
+    /// - `y = floor(x / 10^scale_exp)` (the Miden amount as a Felt)
+    ///
+    /// # Arguments
+    /// * `scale_exp` - The scaling exponent (0-18)
+    ///
+    /// # Returns
+    /// The scaled-down Miden amount as a Felt
+    ///
+    /// # Errors
+    /// - [`EthAmountError::ScaleTooLarge`] if scale_exp > 18
+    /// - [`EthAmountError::ScaledValueDoesNotFitU64`] if the result doesn't fit in a u64
+    /// - [`EthAmountError::ScaledValueExceedsMaxFungibleAmount`] if the scaled value exceeds the
+    ///   maximum fungible token amount
+    ///
+    /// # Example
+    /// ```ignore
+    /// let eth_amount = EthAmount::from_uint_str("1000000000000000000")?; // 1 ETH in wei
+    /// let miden_amount = eth_amount.scale_to_token_amount(12)?;
+    /// // Result: 1_000_000 (1e6, Miden representation)
+    /// ```
+    pub fn scale_to_token_amount(&self, scale_exp: u32) -> Result<Felt, EthAmountError> {
+        let x = self.to_u256();
+        let scale = U256::from(pow10_u64(scale_exp)?);
+
+        let y_u256 = x / scale;
+
+        // y must fit into u64; canonical Felt is guaranteed by max amount bound
+        let y_u64: u64 = y_u256.try_into().map_err(|_| EthAmountError::ScaledValueDoesNotFitU64)?;
+
+        if y_u64 > FungibleAsset::MAX_AMOUNT {
+            return Err(EthAmountError::ScaledValueExceedsMaxFungibleAmount);
         }
-        Ok(())
+
+        // Safe because FungibleAsset::MAX_AMOUNT < Felt modulus
+        let y_felt = Felt::try_from(y_u64).expect("scaled value must fit into canonical Felt");
+        Ok(y_felt)
     }
 }
diff --git
a/crates/miden-agglayer/src/eth_types/global_index.rs b/crates/miden-agglayer/src/eth_types/global_index.rs new file mode 100644 index 0000000000..35df7e9a85 --- /dev/null +++ b/crates/miden-agglayer/src/eth_types/global_index.rs @@ -0,0 +1,225 @@ +use alloc::vec::Vec; + +use miden_core::utils::bytes_to_packed_u32_elements; +use miden_protocol::Felt; +use miden_protocol::utils::{HexParseError, hex_to_bytes}; + +// ================================================================================================ +// GLOBAL INDEX ERROR +// ================================================================================================ + +/// Error type for GlobalIndex validation. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum GlobalIndexError { + /// The leading 160 bits of the global index are not zero. + LeadingBitsNonZero, + /// The mainnet flag is not a valid boolean (must be exactly 0 or 1). + InvalidMainnetFlag, + /// The rollup index is not zero for a mainnet deposit. + RollupIndexNonZero, +} + +// ================================================================================================ +// GLOBAL INDEX +// ================================================================================================ + +/// Represents an AggLayer global index as a 256-bit value (32 bytes). +/// +/// The global index is a uint256 that encodes (from MSB to LSB): +/// - Top 160 bits (limbs 0-4): must be zero +/// - 32 bits (limb 5): mainnet flag (value = 1 for mainnet, 0 for rollup) +/// - 32 bits (limb 6): rollup index (must be 0 for mainnet deposits) +/// - 32 bits (limb 7): leaf index (deposit index in the local exit tree) +/// +/// Bytes are stored in big-endian order, matching Solidity's uint256 representation. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct GlobalIndex([u8; 32]); + +impl GlobalIndex { + /// Creates a [`GlobalIndex`] from a hex string (with or without "0x" prefix). 
+ /// + /// The hex string should represent a Solidity uint256 in big-endian format + /// (64 hex characters for 32 bytes). + pub fn from_hex(hex_str: &str) -> Result { + let bytes: [u8; 32] = hex_to_bytes(hex_str)?; + Ok(Self(bytes)) + } + + /// Creates a new [`GlobalIndex`] from a 32-byte array (big-endian). + pub fn new(bytes: [u8; 32]) -> Self { + Self(bytes) + } + + /// Validates this global index. + /// + /// Checks that: + /// - The top 160 bits (bytes 0-19) are zero + /// - The mainnet flag (bytes 20-23) is exactly 0 or 1 + /// - For mainnet deposits (flag = 1): the rollup index is 0 + pub fn validate(&self) -> Result<(), GlobalIndexError> { + // Check leading 160 bits are zero + if self.0[0..20].iter().any(|&b| b != 0) { + return Err(GlobalIndexError::LeadingBitsNonZero); + } + + // Check mainnet flag is a valid boolean (exactly 0 or 1) + let flag = self.mainnet_flag(); + if flag > 1 { + return Err(GlobalIndexError::InvalidMainnetFlag); + } + + // For mainnet deposits, rollup index must be zero + if flag == 1 && self.rollup_index() != 0 { + return Err(GlobalIndexError::RollupIndexNonZero); + } + + Ok(()) + } + + /// Returns the raw mainnet flag value (limb 5, bytes 20-23). + /// + /// Valid values are 0 (rollup) or 1 (mainnet). + pub fn mainnet_flag(&self) -> u32 { + u32::from_be_bytes([self.0[20], self.0[21], self.0[22], self.0[23]]) + } + + /// Returns the leaf index (limb 7, lowest 32 bits). + pub fn leaf_index(&self) -> u32 { + u32::from_be_bytes([self.0[28], self.0[29], self.0[30], self.0[31]]) + } + + /// Returns the rollup index (limb 6). + pub fn rollup_index(&self) -> u32 { + u32::from_be_bytes([self.0[24], self.0[25], self.0[26], self.0[27]]) + } + + /// Returns true if this is a mainnet deposit (mainnet flag = 1). + pub fn is_mainnet(&self) -> bool { + self.mainnet_flag() == 1 + } + + /// Converts to field elements for note storage / MASM processing. 
+    pub fn to_elements(&self) -> Vec<Felt> {
+        bytes_to_packed_u32_elements(&self.0)
+    }
+
+    /// Returns the raw 32-byte array (big-endian).
+    pub const fn as_bytes(&self) -> &[u8; 32] {
+        &self.0
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_rollup_global_index_validation() {
+        // Rollup global index: mainnet_flag=0, rollup_index=5, leaf_index=42
+        // Format: (rollup_index << 32) | leaf_index
+        let mut bytes = [0u8; 32];
+        // mainnet flag = 0 (bytes 20-23): already zero
+        // rollup index = 5 (bytes 24-27, BE)
+        bytes[27] = 5;
+        // leaf index = 42 (bytes 28-31, BE)
+        bytes[31] = 42;
+
+        let gi = GlobalIndex::new(bytes);
+
+        assert!(!gi.is_mainnet());
+        assert_eq!(gi.rollup_index(), 5);
+        assert_eq!(gi.leaf_index(), 42);
+        assert!(gi.validate().is_ok());
+    }
+
+    #[test]
+    fn test_rollup_global_index_rejects_leading_bits() {
+        let mut bytes = [0u8; 32];
+        bytes[3] = 1; // non-zero leading bits
+        bytes[27] = 5; // rollup index = 5
+        bytes[31] = 42; // leaf index = 42
+
+        let gi = GlobalIndex::new(bytes);
+        assert_eq!(gi.validate(), Err(GlobalIndexError::LeadingBitsNonZero));
+    }
+
+    #[test]
+    fn test_rollup_global_index_various_indices() {
+        // Test with larger rollup index and leaf index values
+        let test_cases = [
+            (1u32, 0u32),  // first rollup, first leaf
+            (7, 1000),     // rollup 7, leaf 1000
+            (100, 999999), // larger values
+        ];
+
+        for (rollup_idx, leaf_idx) in test_cases {
+            let mut bytes = [0u8; 32];
+            bytes[24..28].copy_from_slice(&rollup_idx.to_be_bytes());
+            bytes[28..32].copy_from_slice(&leaf_idx.to_be_bytes());
+
+            let gi = GlobalIndex::new(bytes);
+            assert!(!gi.is_mainnet());
+            assert_eq!(gi.rollup_index(), rollup_idx);
+            assert_eq!(gi.leaf_index(), leaf_idx);
+            assert!(gi.validate().is_ok());
+        }
+    }
+
+    #[test]
+    fn test_mainnet_global_indices_from_production() {
+        // Real mainnet global indices from production
+        // Format: (1 <<
64) + leaf_index for mainnet deposits + // 18446744073709786619 = 0x1_0000_0000_0003_95FB (leaf_index = 235003) + // 18446744073709786590 = 0x1_0000_0000_0003_95DE (leaf_index = 234974) + let test_cases = [ + ("0x00000000000000000000000000000000000000000000000100000000000395fb", 235003u32), + ("0x00000000000000000000000000000000000000000000000100000000000395de", 234974u32), + ]; + + for (hex, expected_leaf_index) in test_cases { + let gi = GlobalIndex::from_hex(hex).expect("valid hex"); + + // Validate as mainnet + assert!(gi.validate().is_ok(), "should be valid mainnet global index"); + + // Construction sanity checks + assert!(gi.is_mainnet()); + assert_eq!(gi.rollup_index(), 0); + assert_eq!(gi.leaf_index(), expected_leaf_index); + + // Verify to_elements produces correct LE-packed u32 felts + // -------------------------------------------------------------------------------- + + let elements = gi.to_elements(); + assert_eq!(elements.len(), 8); + + // leading zeros + assert_eq!(elements[0..5], [Felt::ZERO; 5]); + + // mainnet flag: BE value 1 → LE-packed as 0x01000000 + assert_eq!(elements[5], Felt::new(u32::from_le_bytes(1u32.to_be_bytes()) as u64)); + + // rollup index + assert_eq!(elements[6], Felt::ZERO); + + // leaf index: BE value → LE-packed + assert_eq!( + elements[7], + Felt::new(u32::from_le_bytes(expected_leaf_index.to_be_bytes()) as u64) + ); + } + } + + #[test] + fn test_invalid_mainnet_flag_rejected() { + // mainnet flag = 3 (invalid, must be 0 or 1) + let mut bytes = [0u8; 32]; + bytes[23] = 3; + bytes[31] = 2; + + let gi = GlobalIndex::new(bytes); + assert_eq!(gi.validate(), Err(GlobalIndexError::InvalidMainnetFlag)); + } +} diff --git a/crates/miden-agglayer/src/eth_types/metadata_hash.rs b/crates/miden-agglayer/src/eth_types/metadata_hash.rs new file mode 100644 index 0000000000..d0f02d83fa --- /dev/null +++ b/crates/miden-agglayer/src/eth_types/metadata_hash.rs @@ -0,0 +1,133 @@ +use alloc::vec::Vec; + +use alloy_sol_types::{SolValue, sol}; 
+use miden_core::utils::bytes_to_packed_u32_elements; +use miden_crypto::hash::keccak::Keccak256; +use miden_protocol::Felt; + +// ================================================================================================ +// METADATA HASH +// ================================================================================================ + +/// Represents a Keccak256 metadata hash as 32 bytes. +/// +/// This type provides a typed representation of metadata hashes for the agglayer bridge, +/// while maintaining compatibility with the existing MASM processing pipeline. +/// +/// The metadata hash is `keccak256(abi.encode(name, symbol, decimals))` where the encoding +/// follows Solidity's `abi.encode` format for `(string, string, uint8)`. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MetadataHash([u8; 32]); + +impl MetadataHash { + /// Creates a new [`MetadataHash`] from a 32-byte array. + pub const fn new(bytes: [u8; 32]) -> Self { + Self(bytes) + } + + /// Computes the metadata hash from raw ABI-encoded metadata bytes. + /// + /// This computes `keccak256(metadata_bytes)`. + pub fn from_abi_encoded(metadata_bytes: &[u8]) -> Self { + let digest = Keccak256::hash(metadata_bytes); + Self(<[u8; 32]>::from(digest)) + } + + /// Computes the metadata hash from token information. + /// + /// This computes `keccak256(abi.encode(name, symbol, decimals))` matching the Solidity + /// bridge's `getTokenMetadata` encoding. + pub fn from_token_info(name: &str, symbol: &str, decimals: u8) -> Self { + let encoded = encode_token_metadata(name, symbol, decimals); + Self::from_abi_encoded(&encoded) + } + + /// Returns the raw 32-byte array. + pub const fn as_bytes(&self) -> &[u8; 32] { + &self.0 + } + + /// Converts the metadata hash to 8 Felt elements for MASM processing. + /// + /// Each 4-byte chunk is converted to a u32 using little-endian byte order. 
+ pub fn to_elements(&self) -> Vec { + bytes_to_packed_u32_elements(&self.0) + } +} + +// ABI ENCODING +// ================================================================================================ + +sol! { + struct SolTokenMetadata { + string name; + string symbol; + uint8 decimals; + } +} + +/// ABI-encodes token metadata as `abi.encode(name, symbol, decimals)`. +/// +/// This produces the same encoding as Solidity's `abi.encode(string, string, uint8)`. +pub(crate) fn encode_token_metadata(name: &str, symbol: &str, decimals: u8) -> Vec { + SolTokenMetadata { + name: name.into(), + symbol: symbol.into(), + decimals, + } + .abi_encode_params() +} + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + extern crate std; + + use std::path::Path; + + use miden_protocol::utils::hex_to_bytes; + use serde::Deserialize; + + use super::*; + + /// Partial deserialization of claim_asset_vectors_local_tx.json + #[derive(Deserialize)] + struct ClaimAssetVectors { + metadata: std::string::String, + metadata_hash: std::string::String, + } + + fn load_test_vectors() -> ClaimAssetVectors { + let path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("solidity-compat/test-vectors/claim_asset_vectors_local_tx.json"); + let data = std::fs::read_to_string(path).expect("failed to read test vectors"); + serde_json::from_str(&data).expect("failed to parse test vectors") + } + + #[test] + fn test_metadata_hash_matches_solidity() { + let vectors = load_test_vectors(); + let expected_metadata = hex_to_vec(&vectors.metadata[2..]); + let expected_hash: [u8; 32] = + hex_to_bytes(&vectors.metadata_hash).expect("valid metadata_hash hex"); + + // The test vectors use: name="Test Token", symbol="TEST", decimals=18 + let encoded = encode_token_metadata("Test Token", "TEST", 18); + assert_eq!(encoded, expected_metadata, "ABI encoding must match Solidity"); + + let hash = 
MetadataHash::from_abi_encoded(&encoded); + assert_eq!(hash.as_bytes(), &expected_hash, "keccak256 hash must match Solidity"); + + let hash_from_info = MetadataHash::from_token_info("Test Token", "TEST", 18); + assert_eq!(hash, hash_from_info, "from_abi_encoded and from_token_info must agree"); + } + + fn hex_to_vec(hex: &str) -> std::vec::Vec { + (0..hex.len()) + .step_by(2) + .map(|i| u8::from_str_radix(&hex[i..i + 2], 16).unwrap()) + .collect() + } +} diff --git a/crates/miden-agglayer/src/eth_types/mod.rs b/crates/miden-agglayer/src/eth_types/mod.rs index c8184cbc8d..3bee167e5d 100644 --- a/crates/miden-agglayer/src/eth_types/mod.rs +++ b/crates/miden-agglayer/src/eth_types/mod.rs @@ -1,5 +1,9 @@ pub mod address; pub mod amount; +pub mod global_index; +pub mod metadata_hash; pub use address::EthAddressFormat; pub use amount::{EthAmount, EthAmountError}; +pub use global_index::{GlobalIndex, GlobalIndexError}; +pub use metadata_hash::MetadataHash; diff --git a/crates/miden-agglayer/src/faucet.rs b/crates/miden-agglayer/src/faucet.rs new file mode 100644 index 0000000000..e7d8367ed0 --- /dev/null +++ b/crates/miden-agglayer/src/faucet.rs @@ -0,0 +1,478 @@ +extern crate alloc; + +use alloc::vec; +use alloc::vec::Vec; + +use miden_core::{Felt, Word}; +use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{ + Account, + AccountComponent, + AccountId, + AccountType, + StorageSlot, + StorageSlotName, +}; +use miden_protocol::asset::TokenSymbol; +use miden_protocol::errors::AccountIdError; +use miden_standards::account::access::Ownable2Step; +use miden_standards::account::faucets::{FungibleFaucetError, TokenMetadata}; +use miden_standards::account::mint_policies::OwnerControlled; +use miden_utils_sync::LazyLock; +use thiserror::Error; + +use super::agglayer_faucet_component_library; +pub use crate::{ + AggLayerBridge, + B2AggNote, + ClaimNoteStorage, + ConfigAggBridgeNote, + EthAddressFormat, + EthAmount, + EthAmountError, + 
ExitRoot, + GlobalIndex, + GlobalIndexError, + LeafData, + MetadataHash, + ProofData, + SmtNode, + UpdateGerNote, + create_claim_note, +}; + +// CONSTANTS +// ================================================================================================ +// Include the generated agglayer constants +include!(concat!(env!("OUT_DIR"), "/agglayer_constants.rs")); + +// AGGLAYER FAUCET STRUCT +// ================================================================================================ + +static CONVERSION_INFO_1_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::faucet::conversion_info_1") + .expect("conversion info 1 storage slot name should be valid") +}); +static CONVERSION_INFO_2_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::faucet::conversion_info_2") + .expect("conversion info 2 storage slot name should be valid") +}); +static METADATA_HASH_LO_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::faucet::metadata_hash_lo") + .expect("metadata hash lo storage slot name should be valid") +}); +static METADATA_HASH_HI_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("agglayer::faucet::metadata_hash_hi") + .expect("metadata hash hi storage slot name should be valid") +}); +/// An [`AccountComponent`] implementing the AggLayer Faucet. +/// +/// It reexports the procedures from `agglayer::faucet`. When linking against this +/// component, the `agglayer` library must be available to the assembler. +/// The procedures of this component are: +/// - `distribute`, which mints assets and creates output notes (with owner verification). +/// - `asset_to_origin_asset`, which converts an asset to the origin asset (used in FPI from +/// bridge). +/// - `burn`, which burns an asset. +/// +/// ## Storage Layout +/// +/// - [`Self::metadata_slot`]: Stores [`TokenMetadata`]. +/// - [`Self::conversion_info_1_slot`]: Stores the first 4 felts of the origin token address. 
+/// - [`Self::conversion_info_2_slot`]: Stores the remaining 5th felt of the origin token address + +/// origin network + scale. +/// - [`Self::metadata_hash_lo_slot`]: Stores the first 4 u32 felts of the metadata hash. +/// - [`Self::metadata_hash_hi_slot`]: Stores the last 4 u32 felts of the metadata hash. +/// +/// ## Required Companion Components +/// +/// This component re-exports `network_fungible::mint_and_send`, which requires: +/// - [`Ownable2Step`]: Provides ownership data (bridge account ID as owner). +/// - [`miden_standards::account::mint_policies::OwnerControlled`]: Provides mint policy management. +/// +/// These must be added as separate components when building the faucet account. +#[derive(Debug, Clone)] +pub struct AggLayerFaucet { + metadata: TokenMetadata, + origin_token_address: EthAddressFormat, + origin_network: u32, + scale: u8, + metadata_hash: MetadataHash, +} + +impl AggLayerFaucet { + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a new AggLayer faucet component from the given configuration. + /// + /// # Errors + /// Returns an error if: + /// - The decimals parameter exceeds maximum value of [`TokenMetadata::MAX_DECIMALS`]. + /// - The max supply exceeds maximum possible amount for a fungible asset. + /// - The token supply exceeds the max supply. + #[allow(clippy::too_many_arguments)] + pub fn new( + symbol: TokenSymbol, + decimals: u8, + max_supply: Felt, + token_supply: Felt, + origin_token_address: EthAddressFormat, + origin_network: u32, + scale: u8, + metadata_hash: MetadataHash, + ) -> Result { + let metadata = TokenMetadata::with_supply(symbol, decimals, max_supply, token_supply)?; + Ok(Self { + metadata, + origin_token_address, + origin_network, + scale, + metadata_hash, + }) + } + + /// Sets the token supply for an existing faucet (e.g. for testing scenarios). 
+ /// + /// # Errors + /// Returns an error if the token supply exceeds the max supply. + pub fn with_token_supply(mut self, token_supply: Felt) -> Result { + self.metadata = self.metadata.with_token_supply(token_supply)?; + Ok(self) + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Storage slot name for [`TokenMetadata`]. + pub fn metadata_slot() -> &'static StorageSlotName { + TokenMetadata::metadata_slot() + } + + /// Storage slot name for the first 4 felts of the origin token address. + pub fn conversion_info_1_slot() -> &'static StorageSlotName { + &CONVERSION_INFO_1_SLOT_NAME + } + + /// Storage slot name for the 5th felt of the origin token address, origin network, and scale. + pub fn conversion_info_2_slot() -> &'static StorageSlotName { + &CONVERSION_INFO_2_SLOT_NAME + } + + /// Storage slot name for the first 4 u32 felts of the metadata hash. + pub fn metadata_hash_lo_slot() -> &'static StorageSlotName { + &METADATA_HASH_LO_SLOT_NAME + } + + /// Storage slot name for the last 4 u32 felts of the metadata hash. + pub fn metadata_hash_hi_slot() -> &'static StorageSlotName { + &METADATA_HASH_HI_SLOT_NAME + } + /// Storage slot name for the owner account ID (bridge), provided by the + /// [`Ownable2Step`] companion component. + pub fn owner_config_slot() -> &'static StorageSlotName { + Ownable2Step::slot_name() + } + + /// Extracts the token metadata from the corresponding storage slot of the provided account. + /// + /// # Errors + /// + /// Returns an error if: + /// - the provided account is not an [`AggLayerFaucet`] account. 
+ pub fn metadata(faucet_account: &Account) -> Result { + // check that the provided account is a faucet account + Self::assert_faucet_account(faucet_account)?; + + let metadata_word = faucet_account + .storage() + .get_item(TokenMetadata::metadata_slot()) + .expect("should be able to read metadata slot"); + TokenMetadata::try_from(metadata_word).map_err(AgglayerFaucetError::FungibleFaucetError) + } + + /// Extracts the bridge account ID from the [`Ownable2Step`] owner config storage slot + /// of the provided account. + /// + /// # Errors + /// + /// Returns an error if: + /// - the provided account is not an [`AggLayerFaucet`] account. + pub fn owner_account_id(faucet_account: &Account) -> Result { + // check that the provided account is a faucet account + Self::assert_faucet_account(faucet_account)?; + + let ownership = Ownable2Step::try_from_storage(faucet_account.storage()) + .map_err(AgglayerFaucetError::Ownable2StepError)?; + ownership.owner().ok_or(AgglayerFaucetError::OwnershipRenounced) + } + + /// Extracts the origin token address from the corresponding storage slot of the provided + /// account. + /// + /// # Errors + /// + /// Returns an error if: + /// - the provided account is not an [`AggLayerFaucet`] account. 
+ pub fn origin_token_address( + faucet_account: &Account, + ) -> Result { + // check that the provided account is a faucet account + Self::assert_faucet_account(faucet_account)?; + + let conversion_info_1 = faucet_account + .storage() + .get_item(&CONVERSION_INFO_1_SLOT_NAME) + .expect("should be able to read the first conversion info slot"); + + let conversion_info_2 = faucet_account + .storage() + .get_item(&CONVERSION_INFO_2_SLOT_NAME) + .expect("should be able to read the second conversion info slot"); + + let addr_bytes_vec = conversion_info_1 + .iter() + .chain([&conversion_info_2[0]]) + .flat_map(|felt| { + u32::try_from(felt.as_canonical_u64()) + .expect("Felt value does not fit into u32") + .to_le_bytes() + }) + .collect::>(); + + Ok(EthAddressFormat::new( + addr_bytes_vec + .try_into() + .expect("origin token addr vector should consist of exactly 20 bytes"), + )) + } + + /// Extracts the origin network ID in form of the u32 from the corresponding storage slot of the + /// provided account. + /// + /// # Errors + /// + /// Returns an error if: + /// - the provided account is not an [`AggLayerFaucet`] account. + pub fn origin_network(faucet_account: &Account) -> Result { + // check that the provided account is a faucet account + Self::assert_faucet_account(faucet_account)?; + + let conversion_info_2 = faucet_account + .storage() + .get_item(&CONVERSION_INFO_2_SLOT_NAME) + .expect("should be able to read the second conversion info slot"); + + Ok(conversion_info_2[1] + .as_canonical_u64() + .try_into() + .expect("origin network ID should fit into u32")) + } + + /// Extracts the scaling factor in form of the u8 from the corresponding storage slot of the + /// provided account. + /// + /// # Errors + /// + /// Returns an error if: + /// - the provided account is not an [`AggLayerFaucet`] account. 
+ pub fn scale(faucet_account: &Account) -> Result { + // check that the provided account is a faucet account + Self::assert_faucet_account(faucet_account)?; + + let conversion_info_2 = faucet_account + .storage() + .get_item(&CONVERSION_INFO_2_SLOT_NAME) + .expect("should be able to read the second conversion info slot"); + + Ok(conversion_info_2[2] + .as_canonical_u64() + .try_into() + .expect("scaling factor should fit into u8")) + } + + // HELPER FUNCTIONS + // -------------------------------------------------------------------------------------------- + + /// Checks that the provided account is an [`AggLayerFaucet`] account. + /// + /// # Errors + /// + /// Returns an error if: + /// - the provided account does not have all AggLayer Faucet specific storage slots. + /// - the provided account does not have all AggLayer Faucet specific procedures. + fn assert_faucet_account(account: &Account) -> Result<(), AgglayerFaucetError> { + // check that the storage slots are as expected + Self::assert_storage_slots(account)?; + + // check that the procedure roots are as expected + Self::assert_code_commitment(account)?; + + Ok(()) + } + + /// Checks that the provided account has all storage slots required for the [`AggLayerFaucet`]. + /// + /// # Errors + /// + /// Returns an error if: + /// - provided account does not have all AggLayer Faucet specific storage slots). 
+ fn assert_storage_slots(account: &Account) -> Result<(), AgglayerFaucetError> { + // get the storage slot names of the provided account + let account_storage_slot_names: Vec<&StorageSlotName> = account + .storage() + .slots() + .iter() + .map(|storage_slot| storage_slot.name()) + .collect::>(); + + // check that all bridge specific storage slots are presented in the provided account + let are_slots_present = Self::slot_names() + .iter() + .all(|slot_name| account_storage_slot_names.contains(slot_name)); + if !are_slots_present { + return Err(AgglayerFaucetError::StorageSlotsMismatch); + } + + Ok(()) + } + + /// Checks that the code commitment of the provided account matches the code commitment of the + /// [`AggLayerFaucet`]. + /// + /// # Errors + /// + /// Returns an error if: + /// - the code commitment of the provided account does not match the code commitment of the + /// [`AggLayerFaucet`]. + fn assert_code_commitment(account: &Account) -> Result<(), AgglayerFaucetError> { + if FAUCET_CODE_COMMITMENT != account.code().commitment() { + return Err(AgglayerFaucetError::CodeCommitmentMismatch); + } + + Ok(()) + } + + /// Returns a vector of all [`AggLayerFaucet`] storage slot names. 
+ fn slot_names() -> Vec<&'static StorageSlotName> { + vec![ + &*CONVERSION_INFO_1_SLOT_NAME, + &*CONVERSION_INFO_2_SLOT_NAME, + &*METADATA_HASH_LO_SLOT_NAME, + &*METADATA_HASH_HI_SLOT_NAME, + TokenMetadata::metadata_slot(), + Ownable2Step::slot_name(), + OwnerControlled::active_policy_proc_root_slot(), + OwnerControlled::allowed_policy_proc_roots_slot(), + OwnerControlled::policy_authority_slot(), + ] + } +} + +impl From for AccountComponent { + fn from(faucet: AggLayerFaucet) -> Self { + let metadata_slot = StorageSlot::from(faucet.metadata); + + let (conversion_slot1_word, conversion_slot2_word) = agglayer_faucet_conversion_slots( + &faucet.origin_token_address, + faucet.origin_network, + faucet.scale, + ); + let conversion_slot1 = + StorageSlot::with_value(CONVERSION_INFO_1_SLOT_NAME.clone(), conversion_slot1_word); + let conversion_slot2 = + StorageSlot::with_value(CONVERSION_INFO_2_SLOT_NAME.clone(), conversion_slot2_word); + + let hash_elements = faucet.metadata_hash.to_elements(); + let metadata_hash_lo = StorageSlot::with_value( + METADATA_HASH_LO_SLOT_NAME.clone(), + Word::new([hash_elements[0], hash_elements[1], hash_elements[2], hash_elements[3]]), + ); + let metadata_hash_hi = StorageSlot::with_value( + METADATA_HASH_HI_SLOT_NAME.clone(), + Word::new([hash_elements[4], hash_elements[5], hash_elements[6], hash_elements[7]]), + ); + + let agglayer_storage_slots = vec![ + metadata_slot, + conversion_slot1, + conversion_slot2, + metadata_hash_lo, + metadata_hash_hi, + ]; + agglayer_faucet_component(agglayer_storage_slots) + } +} + +// AGGLAYER FAUCET ERROR +// ================================================================================================ + +/// AggLayer Faucet related errors. 
+#[derive(Debug, Error)] +pub enum AgglayerFaucetError { + #[error( + "provided account does not have storage slots required for the AggLayer Faucet account" + )] + StorageSlotsMismatch, + #[error("provided account does not have procedures required for the AggLayer Faucet account")] + CodeCommitmentMismatch, + #[error("fungible faucet error")] + FungibleFaucetError(#[source] FungibleFaucetError), + #[error("account ID error")] + AccountIdError(#[source] AccountIdError), + #[error("ownable2step error")] + Ownable2StepError(#[source] miden_standards::account::access::Ownable2StepError), + #[error("faucet ownership has been renounced")] + OwnershipRenounced, +} + +// FAUCET CONVERSION STORAGE HELPERS +// ================================================================================================ + +/// Builds the two storage slot values for faucet conversion metadata. +/// +/// The conversion metadata is stored in two value storage slots: +/// - Slot 1 (`agglayer::faucet::conversion_info_1`): `[addr0, addr1, addr2, addr3]` — first 4 felts +/// of the origin token address (5 × u32 limbs). +/// - Slot 2 (`agglayer::faucet::conversion_info_2`): `[addr4, origin_network, scale, 0]` — +/// remaining address felt + origin network + scale factor. +/// +/// # Parameters +/// - `origin_token_address`: The EVM token address in Ethereum format +/// - `origin_network`: The origin network/chain ID +/// - `scale`: The decimal scaling factor (exponent for 10^scale) +/// +/// # Returns +/// A tuple of two `Word` values representing the two storage slot contents. 
+fn agglayer_faucet_conversion_slots( + origin_token_address: &EthAddressFormat, + origin_network: u32, + scale: u8, +) -> (Word, Word) { + let addr_elements = origin_token_address.to_elements(); + + let slot1 = Word::new([addr_elements[0], addr_elements[1], addr_elements[2], addr_elements[3]]); + + let slot2 = + Word::new([addr_elements[4], Felt::from(origin_network), Felt::from(scale), Felt::ZERO]); + + (slot1, slot2) +} + +// HELPER FUNCTIONS +// ================================================================================================ + +/// Creates an Agglayer Faucet component with the specified storage slots. +/// +/// This component combines network faucet functionality with bridge validation +/// via Foreign Procedure Invocation (FPI). It provides a "claim" procedure that +/// validates CLAIM notes against a bridge MMR account before minting assets. +fn agglayer_faucet_component(storage_slots: Vec) -> AccountComponent { + let library = agglayer_faucet_component_library(); + let metadata = AccountComponentMetadata::new("agglayer::faucet", [AccountType::FungibleFaucet]) + .with_description("AggLayer faucet component with bridge validation"); + + AccountComponent::new(library, storage_slots, metadata).expect( + "agglayer_faucet component should satisfy the requirements of a valid account component", + ) +} diff --git a/crates/miden-agglayer/src/lib.rs b/crates/miden-agglayer/src/lib.rs index e4e0eae9c9..785b0b521e 100644 --- a/crates/miden-agglayer/src/lib.rs +++ b/crates/miden-agglayer/src/lib.rs @@ -2,13 +2,9 @@ extern crate alloc; -use alloc::vec; -use alloc::vec::Vec; - use miden_assembly::Library; -use miden_assembly::utils::Deserializable; -use miden_core::{Felt, FieldElement, Program, Word}; -use miden_protocol::account::component::AccountComponentMetadata; +use miden_assembly::serde::Deserializable; +use miden_core::{Felt, Word}; use miden_protocol::account::{ Account, AccountBuilder, @@ -16,33 +12,37 @@ use miden_protocol::account::{ AccountId, 
AccountStorageMode, AccountType, - StorageSlot, - StorageSlotName, }; use miden_protocol::asset::TokenSymbol; use miden_protocol::note::NoteScript; +use miden_protocol::vm::Program; +use miden_standards::account::access::Ownable2Step; use miden_standards::account::auth::NoAuth; -use miden_standards::account::faucets::NetworkFungibleFaucet; +use miden_standards::account::mint_policies::OwnerControlled; use miden_utils_sync::LazyLock; pub mod b2agg_note; +pub mod bridge; pub mod claim_note; +pub mod config_note; pub mod errors; pub mod eth_types; +pub mod faucet; pub mod update_ger_note; -pub mod utils; pub use b2agg_note::B2AggNote; -pub use claim_note::{ - ClaimNoteStorage, - ExitRoot, - LeafData, - OutputNoteData, - ProofData, - SmtNode, - create_claim_note, +pub use bridge::{AggLayerBridge, AgglayerBridgeError}; +pub use claim_note::{ClaimNoteStorage, ExitRoot, LeafData, ProofData, SmtNode, create_claim_note}; +pub use config_note::ConfigAggBridgeNote; +pub use eth_types::{ + EthAddressFormat, + EthAmount, + EthAmountError, + GlobalIndex, + GlobalIndexError, + MetadataHash, }; -pub use eth_types::{EthAddressFormat, EthAmount, EthAmountError}; +pub use faucet::{AggLayerFaucet, AgglayerFaucetError}; pub use update_ger_note::UpdateGerNote; // AGGLAYER NOTE SCRIPTS @@ -51,7 +51,7 @@ pub use update_ger_note::UpdateGerNote; // Initialize the CLAIM note script only once static CLAIM_SCRIPT: LazyLock = LazyLock::new(|| { let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/CLAIM.masb")); - let program = Program::read_from_bytes(bytes).expect("Shipped CLAIM script is well-formed"); + let program = Program::read_from_bytes(bytes).expect("shipped CLAIM script is well-formed"); NoteScript::new(program) }); @@ -63,294 +63,232 @@ pub fn claim_script() -> NoteScript { // AGGLAYER ACCOUNT COMPONENTS // ================================================================================================ -// Initialize the unified AggLayer library only once static 
AGGLAYER_LIBRARY: LazyLock = LazyLock::new(|| { let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/agglayer.masl")); - Library::read_from_bytes(bytes).expect("Shipped AggLayer library is well-formed") + Library::read_from_bytes(bytes).expect("shipped AggLayer library is well-formed") }); -/// Returns the unified AggLayer Library containing all agglayer modules. -pub fn agglayer_library() -> Library { - AGGLAYER_LIBRARY.clone() -} - -/// Returns the Bridge Out Library. -/// -/// Note: This is now the same as agglayer_library() since all agglayer components -/// are compiled into a single library. -pub fn bridge_out_library() -> Library { - agglayer_library() -} - -/// Returns the Local Exit Tree Library. -/// -/// Note: This is now the same as agglayer_library() since all agglayer components -/// are compiled into a single library. -pub fn local_exit_tree_library() -> Library { - agglayer_library() -} - -/// Creates a Local Exit Tree component with the specified storage slots. -/// -/// This component uses the local_exit_tree library and can be added to accounts -/// that need to manage local exit tree functionality. -pub fn local_exit_tree_component(storage_slots: Vec) -> AccountComponent { - let library = local_exit_tree_library(); - let metadata = AccountComponentMetadata::new("agglayer::local_exit_tree") - .with_description("Local exit tree component for AggLayer") - .with_supports_all_types(); - - AccountComponent::new(library, storage_slots, metadata).expect( - "local_exit_tree component should satisfy the requirements of a valid account component", - ) -} - -/// Creates a Bridge Out component with the specified storage slots. -/// -/// This component uses the bridge_out library and can be added to accounts -/// that need to bridge assets out to the AggLayer. 
-pub fn bridge_out_component(storage_slots: Vec) -> AccountComponent { - let library = bridge_out_library(); - let metadata = AccountComponentMetadata::new("agglayer::bridge_out") - .with_description("Bridge out component for AggLayer") - .with_supports_all_types(); - - AccountComponent::new(library, storage_slots, metadata) - .expect("bridge_out component should satisfy the requirements of a valid account component") -} - -/// Returns the Bridge In Library. -/// -/// Note: This is now the same as agglayer_library() since all agglayer components -/// are compiled into a single library. -pub fn bridge_in_library() -> Library { - agglayer_library() -} - -/// Creates a Bridge In component with the specified storage slots. -/// -/// This component uses the agglayer library and can be added to accounts -/// that need to bridge assets in from the AggLayer. -pub fn bridge_in_component(storage_slots: Vec) -> AccountComponent { - let library = bridge_in_library(); - let metadata = AccountComponentMetadata::new("agglayer::bridge_in") - .with_description("Bridge in component for AggLayer") - .with_supports_all_types(); - - AccountComponent::new(library, storage_slots, metadata) - .expect("bridge_in component should satisfy the requirements of a valid account component") -} - -/// Returns the Agglayer Faucet Library. -/// -/// Note: This is now the same as agglayer_library() since all agglayer components -/// are compiled into a single library. -pub fn agglayer_faucet_library() -> Library { - agglayer_library() -} +static BRIDGE_COMPONENT_LIBRARY: LazyLock = LazyLock::new(|| { + let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/components/bridge.masl")); + Library::read_from_bytes(bytes).expect("shipped bridge component library is well-formed") +}); -/// Creates an Agglayer Faucet component with the specified storage slots. -/// -/// This component combines network faucet functionality with bridge validation -/// via Foreign Procedure Invocation (FPI). 
It provides a "claim" procedure that -/// validates CLAIM notes against a bridge MMR account before minting assets. -pub fn agglayer_faucet_component(storage_slots: Vec) -> AccountComponent { - let library = agglayer_faucet_library(); - let metadata = AccountComponentMetadata::new("agglayer::faucet") - .with_description("AggLayer faucet component with bridge validation") - .with_supported_type(AccountType::FungibleFaucet); +static FAUCET_COMPONENT_LIBRARY: LazyLock = LazyLock::new(|| { + let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/components/faucet.masl")); + Library::read_from_bytes(bytes).expect("shipped faucet component library is well-formed") +}); - AccountComponent::new(library, storage_slots, metadata).expect( - "agglayer_faucet component should satisfy the requirements of a valid account component", - ) +/// Returns the AggLayer Library containing all agglayer modules. +pub fn agglayer_library() -> Library { + AGGLAYER_LIBRARY.clone() } -/// Creates a combined Bridge Out component that includes both bridge_out and local_exit_tree -/// modules. -/// -/// This is a convenience function that creates a component with multiple modules. -/// For more fine-grained control, use the individual component functions and combine them -/// using the AccountBuilder pattern. -pub fn bridge_out_with_local_exit_tree_component( - storage_slots: Vec, -) -> Vec { - vec![ - bridge_out_component(storage_slots.clone()), - local_exit_tree_component(vec![]), // local_exit_tree typically doesn't need storage slots - ] +/// Returns the Bridge component library. +fn agglayer_bridge_component_library() -> Library { + BRIDGE_COMPONENT_LIBRARY.clone() } -/// Creates an Asset Conversion component with the specified storage slots. -/// -/// This component uses the agglayer library (which includes asset_conversion) and can be added to -/// accounts that need to convert assets between Miden and Ethereum formats. 
-pub fn asset_conversion_component(storage_slots: Vec) -> AccountComponent { - let library = agglayer_library(); - let metadata = AccountComponentMetadata::new("agglayer::asset_conversion") - .with_description("Asset conversion component for Miden/Ethereum formats") - .with_supports_all_types(); - - AccountComponent::new(library, storage_slots, metadata).expect( - "asset_conversion component should satisfy the requirements of a valid account component", - ) +/// Returns the Faucet component library. +fn agglayer_faucet_component_library() -> Library { + FAUCET_COMPONENT_LIBRARY.clone() } // AGGLAYER ACCOUNT CREATION HELPERS // ================================================================================================ -/// Creates a bridge account component with the standard bridge storage slot. -/// -/// This is a convenience function that creates the bridge storage slot with the standard -/// name "miden::agglayer::bridge" and returns the bridge_out component. -/// -/// # Returns -/// Returns an [`AccountComponent`] configured for bridge operations with MMR validation. -pub fn create_bridge_account_component() -> AccountComponent { - let bridge_storage_slot_name = StorageSlotName::new("miden::agglayer::bridge") - .expect("Bridge storage slot name should be valid"); - let bridge_storage_slots = vec![StorageSlot::with_empty_map(bridge_storage_slot_name)]; - bridge_out_component(bridge_storage_slots) -} - /// Creates an agglayer faucet account component with the specified configuration. 
/// /// This function creates all the necessary storage slots for an agglayer faucet: -/// - Network faucet metadata slot (max_supply, decimals, token_symbol) -/// - Bridge account reference slot for FPI validation +/// - Network faucet metadata slot (token_supply, max_supply, decimals, token_symbol) +/// - Conversion info slot 1: first 4 felts of origin token address +/// - Conversion info slot 2: 5th address felt + origin network + scale +/// - Owner config slot: bridge account ID for MINT note authorization /// /// # Parameters /// - `token_symbol`: The symbol for the fungible token (e.g., "AGG") /// - `decimals`: Number of decimal places for the token /// - `max_supply`: Maximum supply of the token +/// - `token_supply`: Initial outstanding token supply (0 for new faucets) /// - `bridge_account_id`: The account ID of the bridge account for validation +/// - `origin_token_address`: The EVM origin token address +/// - `origin_network`: The origin network/chain ID +/// - `scale`: The decimal scaling factor (exponent for 10^scale) /// /// # Returns /// Returns an [`AccountComponent`] configured for agglayer faucet operations. /// /// # Panics -/// Panics if the token symbol is invalid or storage slot names are malformed. -pub fn create_agglayer_faucet_component( +/// Panics if the token symbol is invalid or metadata validation fails. 
+#[allow(clippy::too_many_arguments)] +fn create_agglayer_faucet_component( token_symbol: &str, decimals: u8, max_supply: Felt, - bridge_account_id: AccountId, + token_supply: Felt, + origin_token_address: &EthAddressFormat, + origin_network: u32, + scale: u8, + metadata_hash: MetadataHash, ) -> AccountComponent { - // Create network faucet metadata slot: [0, max_supply, decimals, token_symbol] - let token_symbol = TokenSymbol::new(token_symbol).expect("Token symbol should be valid"); - let metadata_word = - Word::new([FieldElement::ZERO, max_supply, Felt::from(decimals), token_symbol.into()]); - let metadata_slot = - StorageSlot::with_value(NetworkFungibleFaucet::metadata_slot().clone(), metadata_word); - - // Create agglayer-specific bridge storage slot - let bridge_account_id_word = Word::new([ - Felt::new(0), - Felt::new(0), - bridge_account_id.suffix(), - bridge_account_id.prefix().as_felt(), - ]); - let agglayer_storage_slot_name = StorageSlotName::new("miden::agglayer::faucet") - .expect("Agglayer faucet storage slot name should be valid"); - let bridge_slot = StorageSlot::with_value(agglayer_storage_slot_name, bridge_account_id_word); - - // Combine all storage slots for the agglayer faucet component - let agglayer_storage_slots = vec![metadata_slot, bridge_slot]; - agglayer_faucet_component(agglayer_storage_slots) + let symbol = TokenSymbol::new(token_symbol).expect("token symbol should be valid"); + AggLayerFaucet::new( + symbol, + decimals, + max_supply, + token_supply, + *origin_token_address, + origin_network, + scale, + metadata_hash, + ) + .expect("agglayer faucet metadata should be valid") + .into() } /// Creates a complete bridge account builder with the standard configuration. 
-pub fn create_bridge_account_builder(seed: Word) -> AccountBuilder { - // Create the "bridge_in" component - let ger_upper_storage_slot_name = StorageSlotName::new("miden::agglayer::bridge::ger_upper") - .expect("Bridge storage slot name should be valid"); - let ger_lower_storage_slot_name = StorageSlotName::new("miden::agglayer::bridge::ger_lower") - .expect("Bridge storage slot name should be valid"); - let bridge_in_storage_slots = vec![ - StorageSlot::with_value(ger_upper_storage_slot_name, Word::empty()), - StorageSlot::with_value(ger_lower_storage_slot_name, Word::empty()), - ]; - - let bridge_in_component = bridge_in_component(bridge_in_storage_slots); - - // Create the "bridge_out" component - let let_storage_slot_name = StorageSlotName::new("miden::agglayer::let").unwrap(); - let bridge_out_storage_slots = vec![StorageSlot::with_empty_map(let_storage_slot_name)]; - let bridge_out_component = bridge_out_component(bridge_out_storage_slots); - - // Combine the components into a single account(builder) +/// +/// The bridge starts with an empty faucet registry. Faucets are registered at runtime +/// via CONFIG_AGG_BRIDGE notes that call `bridge_config::register_faucet`. +fn create_bridge_account_builder( + seed: Word, + bridge_admin_id: AccountId, + ger_manager_id: AccountId, +) -> AccountBuilder { Account::builder(seed.into()) .storage_mode(AccountStorageMode::Network) - .with_component(bridge_out_component) - .with_component(bridge_in_component) + .with_component(AggLayerBridge::new(bridge_admin_id, ger_manager_id)) } /// Creates a new bridge account with the standard configuration. /// /// This creates a new account suitable for production use. 
-pub fn create_bridge_account(seed: Word) -> Account { - create_bridge_account_builder(seed) +pub fn create_bridge_account( + seed: Word, + bridge_admin_id: AccountId, + ger_manager_id: AccountId, +) -> Account { + create_bridge_account_builder(seed, bridge_admin_id, ger_manager_id) .with_auth_component(AccountComponent::from(NoAuth)) .build() - .expect("Bridge account should be valid") + .expect("bridge account should be valid") } /// Creates an existing bridge account with the standard configuration. /// /// This creates an existing account suitable for testing scenarios. #[cfg(any(feature = "testing", test))] -pub fn create_existing_bridge_account(seed: Word) -> Account { - create_bridge_account_builder(seed) +pub fn create_existing_bridge_account( + seed: Word, + bridge_admin_id: AccountId, + ger_manager_id: AccountId, +) -> Account { + create_bridge_account_builder(seed, bridge_admin_id, ger_manager_id) .with_auth_component(AccountComponent::from(NoAuth)) .build_existing() - .expect("Bridge account should be valid") + .expect("bridge account should be valid") } /// Creates a complete agglayer faucet account builder with the specified configuration. -pub fn create_agglayer_faucet_builder( +/// +/// The builder includes: +/// - The `AggLayerFaucet` component (conversion metadata + token metadata). +/// - The `Ownable2Step` component (bridge account ID as owner for mint authorization). +/// - The `OwnerControlled` component (mint policy management required by +/// `network_fungible::mint_and_send`). 
+#[allow(clippy::too_many_arguments)] +fn create_agglayer_faucet_builder( seed: Word, token_symbol: &str, decimals: u8, max_supply: Felt, + token_supply: Felt, bridge_account_id: AccountId, + origin_token_address: &EthAddressFormat, + origin_network: u32, + scale: u8, + metadata_hash: MetadataHash, ) -> AccountBuilder { - let agglayer_component = - create_agglayer_faucet_component(token_symbol, decimals, max_supply, bridge_account_id); + let agglayer_component = create_agglayer_faucet_component( + token_symbol, + decimals, + max_supply, + token_supply, + origin_token_address, + origin_network, + scale, + metadata_hash, + ); Account::builder(seed.into()) .account_type(AccountType::FungibleFaucet) .storage_mode(AccountStorageMode::Network) .with_component(agglayer_component) + .with_component(Ownable2Step::new(bridge_account_id)) + .with_component(OwnerControlled::owner_only()) } /// Creates a new agglayer faucet account with the specified configuration. /// /// This creates a new account suitable for production use. +#[allow(clippy::too_many_arguments)] pub fn create_agglayer_faucet( seed: Word, token_symbol: &str, decimals: u8, max_supply: Felt, bridge_account_id: AccountId, + origin_token_address: &EthAddressFormat, + origin_network: u32, + scale: u8, + metadata_hash: MetadataHash, ) -> Account { - create_agglayer_faucet_builder(seed, token_symbol, decimals, max_supply, bridge_account_id) - .with_auth_component(AccountComponent::from(NoAuth)) - .build() - .expect("Agglayer faucet account should be valid") + create_agglayer_faucet_builder( + seed, + token_symbol, + decimals, + max_supply, + Felt::ZERO, + bridge_account_id, + origin_token_address, + origin_network, + scale, + metadata_hash, + ) + .with_auth_component(AccountComponent::from(NoAuth)) + .build() + .expect("agglayer faucet account should be valid") } /// Creates an existing agglayer faucet account with the specified configuration. /// /// This creates an existing account suitable for testing scenarios. 
#[cfg(any(feature = "testing", test))] +#[allow(clippy::too_many_arguments)] pub fn create_existing_agglayer_faucet( seed: Word, token_symbol: &str, decimals: u8, max_supply: Felt, + token_supply: Felt, bridge_account_id: AccountId, + origin_token_address: &EthAddressFormat, + origin_network: u32, + scale: u8, + metadata_hash: MetadataHash, ) -> Account { - create_agglayer_faucet_builder(seed, token_symbol, decimals, max_supply, bridge_account_id) - .with_auth_component(AccountComponent::from(NoAuth)) - .build_existing() - .expect("Agglayer faucet account should be valid") + create_agglayer_faucet_builder( + seed, + token_symbol, + decimals, + max_supply, + token_supply, + bridge_account_id, + origin_token_address, + origin_network, + scale, + metadata_hash, + ) + .with_auth_component(AccountComponent::from(NoAuth)) + .build_existing() + .expect("agglayer faucet account should be valid") } diff --git a/crates/miden-agglayer/src/update_ger_note.rs b/crates/miden-agglayer/src/update_ger_note.rs index a9928aec17..07246db9f6 100644 --- a/crates/miden-agglayer/src/update_ger_note.rs +++ b/crates/miden-agglayer/src/update_ger_note.rs @@ -8,8 +8,9 @@ extern crate alloc; use alloc::string::ToString; use alloc::vec; -use miden_assembly::utils::Deserializable; -use miden_core::{Program, Word}; +use miden_assembly::serde::Deserializable; +use miden_core::Word; +use miden_core::program::Program; use miden_protocol::account::AccountId; use miden_protocol::crypto::rand::FeltRng; use miden_protocol::errors::NoteError; @@ -35,7 +36,7 @@ use crate::ExitRoot; static UPDATE_GER_SCRIPT: LazyLock = LazyLock::new(|| { let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/UPDATE_GER.masb")); let program = - Program::read_from_bytes(bytes).expect("Shipped UPDATE_GER script is well-formed"); + Program::read_from_bytes(bytes).expect("shipped UPDATE_GER script is well-formed"); NoteScript::new(program) }); diff --git a/crates/miden-agglayer/src/utils.rs 
b/crates/miden-agglayer/src/utils.rs deleted file mode 100644 index 88850de58c..0000000000 --- a/crates/miden-agglayer/src/utils.rs +++ /dev/null @@ -1,28 +0,0 @@ -use miden_core::FieldElement; -use miden_protocol::Felt; - -// UTILITY FUNCTIONS -// ================================================================================================ - -/// Converts a bytes32 value (32 bytes) into an array of 8 Felt values. -/// -/// Note: These utility functions will eventually be replaced with similar functions from miden-vm. -pub fn bytes32_to_felts(bytes32: &[u8; 32]) -> [Felt; 8] { - let mut result = [Felt::ZERO; 8]; - for (i, chunk) in bytes32.chunks(4).enumerate() { - let value = u32::from_be_bytes([chunk[0], chunk[1], chunk[2], chunk[3]]); - result[i] = Felt::from(value); - } - result -} - -/// Convert 8 Felt values (u32 limbs in little-endian order) to U256 bytes in little-endian format. -pub fn felts_to_u256_bytes(limbs: [Felt; 8]) -> [u8; 32] { - let mut bytes = [0u8; 32]; - for (i, limb) in limbs.iter().enumerate() { - let u32_value = limb.as_int() as u32; - let limb_bytes = u32_value.to_le_bytes(); - bytes[i * 4..(i + 1) * 4].copy_from_slice(&limb_bytes); - } - bytes -} diff --git a/crates/miden-protocol-macros/tests/integration_test.rs b/crates/miden-protocol-macros/tests/integration_test.rs index 05a925e4d5..46f807852f 100644 --- a/crates/miden-protocol-macros/tests/integration_test.rs +++ b/crates/miden-protocol-macros/tests/integration_test.rs @@ -1,6 +1,6 @@ #[cfg(test)] mod tests { - use miden_protocol::{Felt, FieldElement, Word}; + use miden_protocol::{Felt, Word}; use miden_protocol_macros::WordWrapper; #[derive(Debug, Clone, Copy, PartialEq, Eq, WordWrapper)] diff --git a/crates/miden-protocol/Cargo.toml b/crates/miden-protocol/Cargo.toml index 487946b16a..74f08d132b 100644 --- a/crates/miden-protocol/Cargo.toml +++ b/crates/miden-protocol/Cargo.toml @@ -31,7 +31,7 @@ std = [ "miden-processor/std", "miden-verifier/std", ] -testing = 
["dep:rand_chacha", "dep:rand_xoshiro", "dep:winter-rand-utils", "miden-air/testing"] +testing = ["dep:rand_chacha", "dep:rand_xoshiro", "miden-core/testing", "miden-crypto/std"] [dependencies] # Miden dependencies @@ -45,7 +45,6 @@ miden-processor = { workspace = true } miden-protocol-macros = { workspace = true } miden-utils-sync = { workspace = true } miden-verifier = { workspace = true } -winter-rand-utils = { optional = true, version = "0.13" } # External dependencies bech32 = { default-features = false, features = ["alloc"], version = "0.11" } @@ -54,7 +53,7 @@ rand_xoshiro = { default-features = false, optional = true, version = "0.7" } semver = { features = ["serde"], version = "1.0" } serde = { features = ["derive"], optional = true, workspace = true } thiserror = { workspace = true } -toml = { optional = true, version = "0.9" } +toml = { optional = true, version = "1.0" } # for SecretKey generation rand_chacha = { optional = true, workspace = true } @@ -64,15 +63,12 @@ getrandom = { features = ["wasm_js"], version = "0.3" } [dev-dependencies] anyhow = { features = ["backtrace", "std"], workspace = true } assert_matches = { workspace = true } +color-eyre = { version = "0.5" } criterion = { default-features = false, features = ["html_reports"], version = "0.5" } miden-protocol = { features = ["testing"], path = "." 
} pprof = { default-features = false, features = ["criterion", "flamegraph"], version = "0.15" } rstest = { workspace = true } tempfile = { version = "3.19" } -winter-air = { version = "0.13" } -# for HashFunction/ExecutionProof::new_dummy -color-eyre = { version = "0.5" } -miden-air = { features = ["std", "testing"], workspace = true } [build-dependencies] fs-err = { workspace = true } diff --git a/crates/miden-protocol/asm/kernels/transaction/api.masm b/crates/miden-protocol/asm/kernels/transaction/api.masm index 33654d5543..703e3cdab3 100644 --- a/crates/miden-protocol/asm/kernels/transaction/api.masm +++ b/crates/miden-protocol/asm/kernels/transaction/api.masm @@ -1,18 +1,21 @@ +use $kernel::asset use $kernel::account use $kernel::account_delta use $kernel::account_id use $kernel::faucet use $kernel::input_note use $kernel::memory -use $kernel::note use $kernel::output_note use $kernel::tx +# use $kernel::types::AccountId use $kernel::memory::UPCOMING_FOREIGN_PROCEDURE_PTR use $kernel::memory::UPCOMING_FOREIGN_PROC_INPUT_VALUE_15_PTR use miden::core::word +pub type AccountId = struct { prefix: felt, suffix: felt } + # NOTE # ================================================================================================= # `exec_kernel_proc` procedure is expected to be invoked using a `syscall` instruction. It makes # @@ -22,12 +25,6 @@ use miden::core::word # the memory. # # ================================================================================================= -# TYPE ALIASES -# ================================================================================================= - -type AccountID = struct { prefix: felt, suffix: felt } -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } - # ERRORS # ================================================================================================= @@ -170,7 +167,7 @@ end #! Returns the ID of the specified account. #! #! Inputs: [is_native, pad(15)] -#! 
Outputs: [account_id_prefix, account_id_suffix, pad(14)] +#! Outputs: [account_id_suffix, account_id_prefix, pad(14)] #! #! Where: #! - is_native is a boolean flag that indicates whether the account ID was requested for the native @@ -181,38 +178,38 @@ end pub proc account_get_id # get the native account ID exec.memory::get_native_account_id - # => [native_account_id_prefix, native_account_id_suffix, is_native, pad(15)] + # => [native_account_id_suffix, native_account_id_prefix, is_native, pad(15)] # get the active account ID exec.account::get_id # => [ - # active_account_id_prefix, active_account_id_suffix, - # native_account_id_prefix, native_account_id_suffix, + # active_account_id_suffix, active_account_id_prefix, + # native_account_id_suffix, native_account_id_prefix, # is_native, pad(15) # ] # prepare the stack for the first cdrop movup.2 dup.4 # => [ - # is_native, native_account_id_prefix, active_account_id_prefix, - # active_account_id_suffix, native_account_id_suffix, is_native, pad(15) + # is_native, native_account_id_suffix, active_account_id_suffix, + # active_account_id_prefix, native_account_id_prefix, is_native, pad(15) # ] - # drop the prefix corresponding to the is_native flag + # drop the suffix corresponding to the is_native flag cdrop - # => [account_id_prefix, active_account_id_suffix, native_account_id_suffix, is_native, pad(15)] + # => [account_id_suffix, active_account_id_prefix, native_account_id_prefix, is_native, pad(15)] # prepare the stack for the second cdrop movdn.3 swap movup.2 - # => [is_native, native_account_id_suffix, active_account_id_suffix, account_id_prefix, pad(15)] + # => [is_native, native_account_id_prefix, active_account_id_prefix, account_id_suffix, pad(15)] - # drop the suffix corresponding to the is_native flag + # drop the prefix corresponding to the is_native flag cdrop - # => [account_id_suffix, account_id_prefix, pad(15)] + # => [account_id_prefix, account_id_suffix, pad(15)] - # rearrange the ID parts and 
truncate the stack + # rearrange the ID parts and truncate the stack swap movup.2 drop - # => [account_id_prefix, account_id_suffix, pad(14)] + # => [account_id_suffix, account_id_prefix, pad(14)] end #! Returns the active account nonce. @@ -335,11 +332,11 @@ end #! Gets an item from the account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, pad(14)] +#! Inputs: [slot_id_suffix, slot_id_prefix, pad(14)] #! Outputs: [VALUE, pad(12)] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the item. #! @@ -350,7 +347,7 @@ end pub proc account_get_item # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [slot_id_prefix, slot_id_suffix, pad(14)] + # => [slot_id_suffix, slot_id_prefix, pad(14)] # fetch the account storage item exec.account::get_item @@ -363,11 +360,11 @@ end #! Sets an item in the account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, VALUE, pad(10)] +#! Inputs: [slot_id_suffix, slot_id_prefix, VALUE, pad(10)] #! Outputs: [OLD_VALUE, pad(12)] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value to set. #! - OLD_VALUE is the previous value of the item. 
@@ -380,11 +377,11 @@ end pub proc account_set_item # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [slot_id_prefix, slot_id_suffix, VALUE, pad(10)] + # => [slot_id_suffix, slot_id_prefix, VALUE, pad(10)] # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [slot_id_prefix, slot_id_suffix, VALUE, pad(10)] + # => [slot_id_suffix, slot_id_prefix, VALUE, pad(10)] # set the account storage item exec.account::set_item @@ -394,11 +391,11 @@ end #! Returns the VALUE located under the specified KEY within the map contained in the account #! storage slot identified by the slot ID. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY, pad(10)] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY, pad(10)] #! Outputs: [VALUE, pad(12)] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the map item at KEY. #! @@ -410,7 +407,7 @@ end pub proc account_get_map_item # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [slot_id_prefix, slot_id_suffix, KEY, pad(10)] + # => [slot_id_suffix, slot_id_prefix, KEY, pad(10)] # fetch the map item from account storage exec.account::get_map_item @@ -419,11 +416,11 @@ end #! Gets an item from the account storage at its initial state (beginning of transaction). #! -#! Inputs: [slot_id_prefix, slot_id_suffix, pad(14)] +#! Inputs: [slot_id_suffix, slot_id_prefix, pad(14)] #! Outputs: [INIT_VALUE, pad(12)] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! 
the first two felts of the hashed slot name. #! - INIT_VALUE is the initial value of the item at the beginning of the transaction. #! @@ -434,7 +431,7 @@ end pub proc account_get_initial_item # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [slot_id_prefix, slot_id_suffix, pad(14)] + # => [slot_id_suffix, slot_id_prefix, pad(14)] # fetch the initial account storage item exec.account::get_initial_item @@ -448,11 +445,11 @@ end #! Returns the initial VALUE located under the specified KEY within the map contained in the #! account storage slot identified by the slot ID at the beginning of the transaction. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY, pad(10)] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY, pad(10)] #! Outputs: [INIT_VALUE, pad(12)] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - the slot must point to the root of the storage map. #! - INIT_VALUE is the initial value of the map item at KEY at the beginning of the transaction. @@ -465,7 +462,7 @@ end pub proc account_get_initial_map_item # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [slot_id_prefix, slot_id_suffix, KEY, pad(10)] + # => [slot_id_suffix, slot_id_prefix, KEY, pad(10)] # fetch the initial map item from account storage exec.account::get_initial_map_item @@ -475,11 +472,11 @@ end #! Stores NEW_VALUE under the specified KEY within the map contained in the given account storage #! slot. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY, NEW_VALUE, pad(6)] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY, NEW_VALUE, pad(6)] #! Outputs: [OLD_VALUE, pad(12)] #! #! Where: -#! 
- slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - the slot must point to the root of the storage map. #! - NEW_VALUE is the value of the new map item for the respective KEY. @@ -496,11 +493,11 @@ end pub proc account_set_map_item # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [slot_id_prefix, slot_id_suffix, KEY, NEW_VALUE, pad(6)] + # => [slot_id_suffix, slot_id_prefix, KEY, NEW_VALUE, pad(6)] # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [slot_id_prefix, slot_id_suffix, KEY, NEW_VALUE, pad(6)] + # => [slot_id_suffix, slot_id_prefix, KEY, NEW_VALUE, pad(6)] # set the new map item exec.account::set_map_item @@ -551,15 +548,16 @@ end #! Adds the specified asset to the vault. #! -#! Inputs: [ASSET, pad(12)] -#! Outputs: [ASSET', pad(12)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] +#! Outputs: [ASSET_VALUE', pad(12)] #! #! Where: -#! - ASSET is the asset to add to the vault. -#! - ASSET' final asset in the account vault defined as follows: -#! - If ASSET is a non-fungible asset, then ASSET' is the same as ASSET. -#! - If ASSET is a fungible asset, then ASSET' is the total fungible asset in the account vault -#! after ASSET was added to it. +#! - ASSET_KEY is the vault key of the asset that is added to the vault. +#! - ASSET_VALUE is the value of the asset to add to the vault. +#! - ASSET_VALUE' final asset in the account vault defined as follows: +#! - If ASSET_VALUE is a non-fungible asset, then ASSET_VALUE' is the same as ASSET_VALUE. +#! - If ASSET_VALUE is a fungible asset, then ASSET_VALUE' is the total fungible asset in the account vault +#! after ASSET_VALUE was added to it. #! #! Panics if: #! - the asset is not valid. 
@@ -572,24 +570,26 @@ end pub proc account_add_asset # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # add the specified asset to the account vault, emitting the corresponding events exec.account::add_asset_to_vault - # => [ASSET', pad(12)] + # => [ASSET_VALUE', pad(12)] end -#! Removes the specified asset from the vault. +#! Removes the specified asset from the vault and returns the remaining asset value. #! -#! Inputs: [ASSET, pad(12)] -#! Outputs: [ASSET, pad(12)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] +#! Outputs: [REMAINING_ASSET_VALUE, pad(12)] #! #! Where: -#! - ASSET is the asset to remove from the vault. +#! - ASSET_KEY is the vault key of the asset to remove from the vault. +#! - ASSET_VALUE is the value of the asset to remove from the vault. +#! - REMAINING_ASSET_VALUE is the value of the asset remaining in the vault after removal. #! #! Panics if: #! - the fungible asset is not found in the vault. @@ -601,50 +601,53 @@ end pub proc account_remove_asset # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # remove the specified asset from the account vault, emitting the corresponding events exec.account::remove_asset_from_vault - # => [ASSET, pad(12)] + # => [REMAINING_ASSET_VALUE, pad(12)] end -#! Returns the ASSET associated with the provided asset vault key in the active account's vault. +#! 
Returns the asset associated with the provided asset vault key in the active account's vault. #! #! Inputs: [ASSET_KEY, pad(12)] -#! Outputs: [ASSET, pad(12)] +#! Outputs: [ASSET_VALUE, pad(12)] #! #! Where: #! - ASSET_KEY is the asset vault key of the asset to fetch. -#! - ASSET is the asset from the vault, which can be the EMPTY_WORD if it isn't present. +#! - ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't +#! present. #! #! Invocation: dynexec pub proc account_get_asset - # TODO(expand_assets): Validate ASSET_KEY once validation logic exists. + exec.asset::validate_key + # => [ASSET_KEY, pad(12)] exec.account::get_asset - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] end -#! Returns the ASSET associated with the provided asset vault key in the active account's vault at +#! Returns the asset associated with the provided asset vault key in the active account's vault at #! the beginning of the transaction. #! #! Inputs: [ASSET_KEY, pad(12)] -#! Outputs: [ASSET, pad(12)] +#! Outputs: [ASSET_VALUE, pad(12)] #! #! Where: #! - ASSET_KEY is the asset vault key of the asset to fetch. -#! - ASSET is the asset from the vault, which can be the EMPTY_WORD if it isn't present. +#! - ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't present. #! #! Invocation: dynexec pub proc account_get_initial_asset - # TODO(expand_assets): Validate ASSET_KEY once validation logic exists. + exec.asset::validate_key + # => [ASSET_KEY, pad(12)] exec.account::get_initial_asset - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] end #! Returns 1 if a native account procedure was called during transaction execution, and 0 otherwise. @@ -737,11 +740,15 @@ end #! Mint an asset from the faucet the transaction is being executed against. #! -#! Inputs: [ASSET, pad(12)] -#! Outputs: [ASSET, pad(12)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] +#! Outputs: [NEW_ASSET_VALUE, pad(12)] #! #! Where: -#! 
- ASSET is the asset that was minted. +#! - ASSET_KEY is the vault key of the asset to mint. +#! - ASSET_VALUE is the value of the asset that was minted. +#! - NEW_ASSET_VALUE is: +#! - For fungible assets: the ASSET_VALUE merged with the existing vault asset value, if any. +#! - For non-fungible assets: identical to ASSET_VALUE. #! #! Panics if: #! - the transaction is not being executed against a faucet. @@ -758,24 +765,25 @@ end pub proc faucet_mint_asset # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # mint the asset exec.faucet::mint - # => [ASSET, pad(12)] + # => [NEW_ASSET_VALUE, pad(12)] end #! Burn an asset from the faucet the transaction is being executed against. #! -#! Inputs: [ASSET, pad(12)] -#! Outputs: [ASSET, pad(12)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] +#! Outputs: [pad(16)] #! #! Where: -#! - ASSET is the asset that was burned. +#! - ASSET_KEY is the vault key of the asset to burn. +#! - ASSET_VALUE is the value of the asset to burn. #! #! Panics if: #! - the transaction is not being executed against a faucet. @@ -793,15 +801,33 @@ end pub proc faucet_burn_asset # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # burn the asset exec.faucet::burn - # => [ASSET, pad(12)] + # => [pad(16)] +end + +#! Returns whether the active account defines callbacks. +#! +#! Inputs: [pad(16)] +#! Outputs: [has_callbacks, pad(15)] +#! +#! Where: +#! 
- has_callbacks is 1 if the account defines callbacks, 0 otherwise. +#! +#! Invocation: dynexec +pub proc faucet_has_callbacks + exec.account::has_callbacks + # => [has_callbacks, pad(16)] + + # truncate the stack + swap drop + # => [has_callbacks, pad(15)] end # INPUT NOTE @@ -1080,14 +1106,15 @@ pub proc output_note_create # => [note_idx, pad(15)] end -#! Adds the ASSET to the note specified by the index. +#! Adds the asset to the note specified by the index. #! -#! Inputs: [note_idx, ASSET, pad(11)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] #! Outputs: [pad(16)] #! #! Where: #! - note_idx is the index of the note to which the asset is added. -#! - ASSET can be a fungible or non-fungible asset. +#! - ASSET_KEY is the vault key of the asset to add. +#! - ASSET_VALUE is the value of the asset to add. #! #! Panics if: #! - the procedure is called when the active account is not the native one. @@ -1096,7 +1123,7 @@ end pub proc output_note_add_asset # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [note_idx, ASSET, pad(11)] + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] exec.output_note::add_asset # => [pad(16)] @@ -1376,7 +1403,7 @@ end #! 16th value of the foreign procedure inputs to the kernel memory: this allows to FPI any account #! procedure, even if it has 16 input values. #! -#! Inputs: [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, foreign_proc_input_value_15, pad(9)] +#! Inputs: [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, foreign_proc_input_value_15, pad(9)] #! Outputs: [pad(16)] #! #! Where: @@ -1390,10 +1417,10 @@ end #! - the provided foreign account ID is invalid. #! #! 
Invocation: dynexec -pub proc tx_prepare_fpi(foreign_account_id: AccountID, foreign_proc_root: BeWord, foreign_procedure_input_15: felt) +pub proc tx_prepare_fpi(foreign_account_id: AccountId, foreign_proc_root: word, foreign_procedure_input_15: felt) # validate the provided foreign account ID dup.1 dup.1 exec.account_id::validate - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, foreign_proc_input_value_15, pad(9)] + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, foreign_proc_input_value_15, pad(9)] # store the foreign account ID exec.memory::set_fpi_account_id @@ -1443,11 +1470,11 @@ pub proc tx_exec_foreign_proc # load the ID of the foreign account onto the stack exec.memory::get_fpi_account_id - # => [foreign_account_id_prefix, foreign_account_id_suffix, foreign_procedure_inputs(16)] + # => [foreign_account_id_suffix, foreign_account_id_prefix, foreign_procedure_inputs(16)] # check that foreign account ID is not equal zero dup.1 eq.0 dup.1 eq.0 and not assert.err=ERR_FOREIGN_ACCOUNT_ID_IS_ZERO - # => [foreign_account_id_prefix, foreign_account_id_suffix, foreign_procedure_inputs(16)] + # => [foreign_account_id_suffix, foreign_account_id_prefix, foreign_procedure_inputs(16)] # load the foreign account to the memory exec.tx::start_foreign_context @@ -1458,7 +1485,7 @@ pub proc tx_exec_foreign_proc # => [foreign_proc_root_ptr, foreign_procedure_inputs(16)] # check that the foreign procedure root is not zero - padw mem_loadw_be.UPCOMING_FOREIGN_PROCEDURE_PTR + padw mem_loadw_le.UPCOMING_FOREIGN_PROCEDURE_PTR # => [FOREIGN_PROC_ROOT, foreign_proc_root_ptr, foreign_procedure_inputs(16)] exec.word::eqz assertz.err=ERR_FOREIGN_ACCOUNT_PROCEDURE_ROOT_IS_ZERO @@ -1471,6 +1498,10 @@ pub proc tx_exec_foreign_proc # end the foreign context exec.tx::end_foreign_context # => [foreign_procedure_outputs(16)] + + # clear the foreign procedure ID and root in memory + exec.tx::clear_fpi_memory + # => 
[foreign_procedure_outputs(16)] end #! Updates the transaction expiration block delta. diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/account.masm b/crates/miden-protocol/asm/kernels/transaction/lib/account.masm index 83854cd6af..40003cfe92 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/account.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/account.masm @@ -1,6 +1,9 @@ use $kernel::account_delta use $kernel::account_id use $kernel::asset_vault +use $kernel::callbacks +use $kernel::callbacks::ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_PROC_ROOT_SLOT +use $kernel::callbacks::ON_BEFORE_ASSET_ADDED_TO_NOTE_PROC_ROOT_SLOT use $kernel::constants::ACCOUNT_PROCEDURE_DATA_LENGTH use $kernel::constants::EMPTY_SMT_ROOT use $kernel::constants::STORAGE_SLOT_TYPE_MAP @@ -10,7 +13,7 @@ use $kernel::memory::ACCT_ID_SUFFIX_OFFSET use $kernel::memory::ACCT_ID_PREFIX_OFFSET use miden::core::collections::smt use miden::core::collections::sorted_array -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 use miden::core::mem use miden::core::word @@ -61,6 +64,8 @@ const ERR_ACCOUNT_ID_UNKNOWN_STORAGE_MODE="unknown account storage mode in accou const ERR_ACCOUNT_READING_MAP_VALUE_FROM_NON_MAP_SLOT="failed to read an account map item from a non-map storage slot" +const ERR_FOREIGN_ACCOUNT_ID_MISMATCH="foreign account ID provided with advice map doesn't match the ID provided by the operand stack" + # CONSTANTS # ================================================================================================= @@ -301,17 +306,18 @@ pub proc compute_commitment # => [] # prepare the stack for computing the account commitment - exec.memory::get_active_account_data_ptr padw padw padw - # => [RATE, RATE, PERM, account_data_ptr] + exec.memory::get_active_account_data_ptr + exec.poseidon2::init_no_padding + # => [RATE0, RATE1, CAPACITY, account_data_ptr] # stream account data and compute sequential hash. 
We perform two `mem_stream` operations # because the account data consists of exactly 4 words. - mem_stream exec.rpo256::permute - mem_stream exec.rpo256::permute - # => [RATE, RATE, PERM, account_data_ptr'] + mem_stream exec.poseidon2::permute + mem_stream exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY, account_data_ptr'] # extract account commitment - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # => [ACCOUNT_COMMITMENT, account_data_ptr'] # drop account_data_ptr @@ -409,11 +415,11 @@ end #! Gets an item from the account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix] +#! Inputs: [slot_id_suffix, slot_id_prefix] #! Outputs: [VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the item. #! @@ -422,9 +428,9 @@ end pub proc get_item # get account storage slots section offset exec.memory::get_account_active_storage_slots_section_ptr - # => [acct_storage_slots_section_offset, slot_id_prefix, slot_id_suffix] + # => [acct_storage_slots_section_offset, slot_id_suffix, slot_id_prefix] - exec.find_storage_slot + exec.get_storage_slot_ptr # => [slot_ptr] # get the item from storage @@ -432,13 +438,50 @@ pub proc get_item # => [VALUE] end +#! Finds an item in the active account's storage by slot ID, returning whether the slot was found +#! along with its value. +#! +#! Unlike `get_item`, this procedure does not panic if the slot does not exist. Instead, it +#! returns `is_found = 0` and the empty word. +#! +#! Inputs: [slot_id_suffix, slot_id_prefix] +#! Outputs: [is_found, VALUE] +#! +#! Where: +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are +#! the first two felts of the hashed slot name. +#! - is_found is 1 if the slot was found, 0 otherwise. +#! 
- VALUE is the value of the item, or the empty word if the slot was not found. +pub proc find_item + # get account storage slots section offset + exec.memory::get_account_active_storage_slots_section_ptr + # => [acct_storage_slots_section_offset, slot_id_suffix, slot_id_prefix] + + exec.find_storage_slot + # => [is_found, slot_ptr] + + if.true + # slot was found, read its value + exec.get_item_raw + # => [VALUE] + + push.1 + # => [is_found = 1, VALUE] + else + # slot was not found, drop slot_ptr and return empty word + drop padw push.0 + # => [is_found = 0, EMPTY_WORD] + end + # => [is_found, VALUE] +end + #! Gets an item and its slot type from the account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix] +#! Inputs: [slot_id_suffix, slot_id_prefix] #! Outputs: [VALUE, slot_type] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the item. #! - slot_type is the type of the slot. @@ -448,9 +491,9 @@ end pub proc get_typed_item # get account storage slots section offset exec.memory::get_account_active_storage_slots_section_ptr - # => [acct_storage_slots_section_offset, slot_id_prefix, slot_id_suffix] + # => [acct_storage_slots_section_offset, slot_id_suffix, slot_id_prefix] - exec.find_storage_slot + exec.get_storage_slot_ptr # => [slot_ptr] dup add.ACCOUNT_SLOT_TYPE_OFFSET mem_load @@ -463,11 +506,11 @@ end #! Gets an item from the account storage at its initial state (beginning of transaction). #! -#! Inputs: [slot_id_prefix, slot_id_suffix] +#! Inputs: [slot_id_suffix, slot_id_prefix] #! Outputs: [INIT_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! 
the first two felts of the hashed slot name. #! - INIT_VALUE is the initial value of the item at the beginning of the transaction. #! @@ -476,9 +519,9 @@ end pub proc get_initial_item # get account initial storage slots section offset exec.memory::get_account_initial_storage_slots_ptr - # => [account_initial_storage_slots_ptr, slot_id_prefix, slot_id_suffix] + # => [account_initial_storage_slots_ptr, slot_id_suffix, slot_id_prefix] - exec.find_storage_slot + exec.get_storage_slot_ptr # => [slot_ptr] # get the item from initial storage @@ -486,13 +529,13 @@ pub proc get_initial_item # => [INIT_VALUE] end -#! Sets an item in the account storage. +#! Sets an item in the account storage of the native account. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, VALUE] +#! Inputs: [slot_id_suffix, slot_id_prefix, VALUE] #! Outputs: [OLD_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value to set. #! - OLD_VALUE is the previous value of the item. @@ -502,12 +545,12 @@ end #! - the storage slot type is not value. pub proc set_item emit.ACCOUNT_STORAGE_BEFORE_SET_ITEM_EVENT - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] - exec.memory::get_account_active_storage_slots_section_ptr - # => [storage_slots_ptr, slot_id_prefix, slot_id_suffix, VALUE] + exec.memory::get_native_account_active_storage_slots_ptr + # => [storage_slots_ptr, slot_id_suffix, slot_id_prefix, VALUE] - exec.find_storage_slot + exec.get_storage_slot_ptr # => [slot_ptr, VALUE] # load the slot type @@ -540,11 +583,11 @@ end #! Returns the VALUE located under the specified KEY within the map contained in the account #! storage slot identified by the slot ID. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY] +#! 
Inputs: [slot_id_suffix, slot_id_prefix, KEY] #! Outputs: [VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the map item at KEY. #! @@ -553,7 +596,7 @@ end #! - the requested storage slot type is not map. pub proc get_map_item exec.memory::get_account_active_storage_slots_section_ptr - # => [storage_slots_ptr, slot_id_prefix, slot_id_suffix, KEY] + # => [storage_slots_ptr, slot_id_suffix, slot_id_prefix, KEY] exec.get_map_item_raw end @@ -561,11 +604,11 @@ end #! Returns the VALUE located under the specified KEY within the map contained in the given #! account storage slot at its initial state (beginning of transaction). #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY] #! Outputs: [INIT_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - INIT_VALUE is the initial value of the map item at KEY at the beginning of the transaction. #! @@ -574,18 +617,19 @@ end #! - the requested storage slot type is not map. pub proc get_initial_map_item exec.memory::get_account_initial_storage_slots_ptr - # => [initial_storage_slots_ptr, slot_id_prefix, slot_id_suffix, KEY] + # => [initial_storage_slots_ptr, slot_id_suffix, slot_id_prefix, KEY] exec.get_map_item_raw end -#! Stores NEW_VALUE under the specified KEY within the map contained in the given account storage slot. +#! Stores NEW_VALUE under the specified KEY within the map contained in the specified storage slot +#! of the native account. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY, NEW_VALUE] +#! 
Inputs: [slot_id_suffix, slot_id_prefix, KEY, NEW_VALUE] #! Outputs: [OLD_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - the slot must point to the root of the storage map. #! - NEW_VALUE is the value to set under KEY. @@ -597,11 +641,11 @@ end #! - the storage slot type is not map. #! - no map with the root of the slot is found. pub proc set_map_item - exec.memory::get_account_active_storage_slots_section_ptr - # => [storage_slots_ptr, slot_id_prefix, slot_id_suffix, KEY, NEW_VALUE] + exec.memory::get_native_account_active_storage_slots_ptr + # => [storage_slots_ptr, slot_id_suffix, slot_id_prefix, KEY, NEW_VALUE] # resolve the slot name to its pointer - exec.find_storage_slot + exec.get_storage_slot_ptr # => [slot_ptr, KEY, NEW_VALUE] # load the slot type @@ -617,7 +661,7 @@ pub proc set_map_item # => [OLD_VALUE] end -#! Returns the type of the storage slot at the provided index. +#! Returns the type of the storage slot at the provided index for the native account. #! #! WARNING: The index must be in bounds. #! @@ -627,7 +671,7 @@ end #! Where: #! - index is the location in memory of the storage slot. #! - slot_type is the type of the storage slot. -pub proc get_storage_slot_type +pub proc get_native_storage_slot_type # convert the index into a memory offset mul.ACCOUNT_STORAGE_SLOT_DATA_LENGTH # => [offset] @@ -647,15 +691,16 @@ end #! Adds the specified asset to the account vault. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET'] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_VALUE'] #! #! Where: -#! - ASSET is the asset that is added to the vault. -#! - ASSET' final asset in the account vault defined as follows: -#! - If ASSET is a non-fungible asset, then ASSET' is the same as ASSET. -#! 
- If ASSET is a fungible asset, then ASSET' is the total fungible asset in the account vault -#! after ASSET was added to it. +#! - ASSET_KEY is the vault key of the asset that is added to the vault. +#! - ASSET_VALUE is the value of the asset that is added to the vault. +#! - ASSET_VALUE' final asset in the account vault defined as follows: +#! - If ASSET_VALUE is a non-fungible asset, then ASSET_VALUE' is the same as ASSET_VALUE. +#! - If ASSET_VALUE is a fungible asset, then ASSET_VALUE' is the total fungible asset in the account vault +#! after ASSET_VALUE was added to it. #! #! Panics if: #! - the asset is not valid. @@ -663,73 +708,90 @@ end #! added. #! - the vault already contains the same non-fungible asset. pub proc add_asset_to_vault - # duplicate the ASSET to be able to emit an event after an asset is being added - dupw - # => [ASSET, ASSET] + swapw dupw.1 + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY] - # fetch the account vault root - exec.memory::get_account_vault_root_ptr movdn.4 - # => [ASSET, acct_vault_root_ptr, ASSET] + exec.callbacks::on_before_asset_added_to_account + swapw + # => [ASSET_KEY, PROCESSED_ASSET_VALUE] + + # duplicate the asset for the later event and delta update + dupw.1 dupw.1 + # => [ASSET_KEY, PROCESSED_ASSET_VALUE, ASSET_KEY, PROCESSED_ASSET_VALUE] + + # push the account vault root ptr + exec.memory::get_account_vault_root_ptr movdn.8 + # => [ASSET_KEY, PROCESSED_ASSET_VALUE, account_vault_root_ptr, ASSET_KEY, PROCESSED_ASSET_VALUE] # emit event to signal that an asset is going to be added to the account vault emit.ACCOUNT_VAULT_BEFORE_ADD_ASSET_EVENT + # => [ASSET_KEY, PROCESSED_ASSET_VALUE, account_vault_root_ptr, ASSET_KEY, PROCESSED_ASSET_VALUE] # add the asset to the account vault exec.asset_vault::add_asset - # => [ASSET', ASSET] - - swapw - # => [ASSET, ASSET'] + # => [PROCESSED_ASSET_VALUE', ASSET_KEY, PROCESSED_ASSET_VALUE] - dupw exec.account_delta::add_asset - # => [ASSET, ASSET'] + movdnw.2 + # => [ASSET_KEY, 
PROCESSED_ASSET_VALUE, PROCESSED_ASSET_VALUE'] # emit event to signal that an asset is being added to the account vault emit.ACCOUNT_VAULT_AFTER_ADD_ASSET_EVENT - dropw - # => [ASSET'] + # => [ASSET_KEY, PROCESSED_ASSET_VALUE, PROCESSED_ASSET_VALUE'] + + exec.account_delta::add_asset + # => [PROCESSED_ASSET_VALUE'] end -#! Removes the specified asset from the account vault. +#! Removes the specified asset from the account vault and returns the remaining asset value. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [REMAINING_ASSET_VALUE] #! #! Where: -#! - ASSET is the asset to remove from the vault. +#! - ASSET_KEY is the asset vault key of the asset to remove from the vault. +#! - ASSET_VALUE is the value of the asset to remove from the vault. +#! - REMAINING_ASSET_VALUE is the value of the asset remaining in the vault after removal. #! #! Panics if: #! - the fungible asset is not found in the vault. #! - the amount of the fungible asset in the vault is less than the amount to be removed. #! - the non-fungible asset is not found in the vault. 
pub proc remove_asset_from_vault - # fetch the vault root - exec.memory::get_account_vault_root_ptr movdn.4 - # => [ASSET, acct_vault_root_ptr] + # duplicate the asset for the later event and delta update + dupw.1 dupw.1 + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY, ASSET_VALUE] + + # push the vault root ptr + exec.memory::get_account_vault_root_ptr movdn.8 + # => [ASSET_KEY, ASSET_VALUE, account_vault_root_ptr, ASSET_KEY, ASSET_VALUE] # emit event to signal that an asset is going to be removed from the account vault emit.ACCOUNT_VAULT_BEFORE_REMOVE_ASSET_EVENT + # => [ASSET_KEY, ASSET_VALUE, account_vault_root_ptr, ASSET_KEY, ASSET_VALUE] # remove the asset from the account vault exec.asset_vault::remove_asset - # => [ASSET] + # => [REMAINING_ASSET_VALUE, ASSET_KEY, ASSET_VALUE] - dupw exec.account_delta::remove_asset - # => [ASSET] + movdnw.2 + # => [ASSET_KEY, ASSET_VALUE, REMAINING_ASSET_VALUE] # emit event to signal that an asset is being removed from the account vault emit.ACCOUNT_VAULT_AFTER_REMOVE_ASSET_EVENT - # => [ASSET] + # => [ASSET_KEY, ASSET_VALUE, REMAINING_ASSET_VALUE] + + exec.account_delta::remove_asset + # => [REMAINING_ASSET_VALUE] end -#! Returns the ASSET associated with the provided asset vault key in the active account's vault. +#! Returns the ASSET_VALUE associated with the provided asset vault key in the active account's vault. #! #! Inputs: [ASSET_KEY] -#! Outputs: [ASSET] +#! Outputs: [ASSET_VALUE] #! #! Where: #! - ASSET_KEY is the asset vault key of the asset to fetch. -#! - ASSET is the asset from the vault, which can be the EMPTY_WORD if it isn't present. +#! - ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't present. pub proc get_asset # get the vault root ptr exec.memory::get_account_vault_root_ptr movdn.4 @@ -741,18 +803,18 @@ pub proc get_asset # get the asset exec.asset_vault::get_asset - # => [ASSET] + # => [ASSET_VALUE] end -#! 
Returns the ASSET associated with the provided asset vault key in the active account's vault at +#! Returns the ASSET_VALUE associated with the provided asset vault key in the active account's vault at #! the beginning of the transaction. #! #! Inputs: [ASSET_KEY] -#! Outputs: [ASSET] +#! Outputs: [ASSET_VALUE] #! #! Where: #! - ASSET_KEY is the asset vault key of the asset to fetch. -#! - ASSET is the asset from the vault, which can be the EMPTY_WORD if it isn't present. +#! - ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't present. pub proc get_initial_asset # get the vault root associated with the initial vault root of the native account exec.memory::get_account_initial_vault_root_ptr movdn.4 @@ -764,7 +826,7 @@ pub proc get_initial_asset # get the asset exec.asset_vault::get_asset - # => [ASSET] + # => [ASSET_VALUE] end # CODE @@ -843,60 +905,49 @@ pub proc validate_seed # Compute the hash of (SEED, CODE_COMMITMENT, STORAGE_COMMITMENT, EMPTY_WORD). 
# --------------------------------------------------------------------------------------------- - # push an empty word as padding for account ID hashing - padw - # => [EMPTY_WORD] + # initialize capacity of the hasher and rate1 with the code commitment + padw exec.memory::get_account_code_commitment + # => [CODE_COMMITMENT, CAPACITY] # push the advice map key at which the seed is located exec.memory::get_native_account_id exec.create_id_key - # => [ACCOUNT_ID_KEY, EMPTY_WORD] + # => [ACCOUNT_ID_KEY, CODE_COMMITMENT, CAPACITY] - # populate first four elements of the rate with the account ID seed + # overwrite ACCOUNT_ID_KEY with the SEED adv.push_mapval adv_loadw - # => [SEED, EMPTY_WORD] - - # pad capacity element of hasher - padw swapw - # => [SEED, 0, 0, 0, 0, EMPTY_WORD] + # => [SEED, CODE_COMMITMENT, CAPACITY] - # populate last four elements of the hasher rate with the code commitment - exec.memory::get_account_code_commitment - # => [CODE_COMMITMENT, SEED, 0, 0, 0, 0, EMPTY_WORD] - - # perform first permutation of seed and code_commitment (from advice stack) - # perm(seed, code_commitment) - exec.rpo256::permute - # => [RATE, RATE, PERM, EMPTY_WORD] + # perform first permutation over (SEED, CODE_COMMITMENT) + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] # clear rate elements dropw dropw - # => [PERM, EMPTY_WORD] - - # perform second permutation perm(storage_commitment, 0, 0, 0, 0) - swapw exec.memory::get_account_storage_commitment swapw - # => [EMPTY_WORD, STORAGE_COMMITMENT, PERM] + # => [CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, CAP] + # perform second permutation over (STORAGE_COMMITMENT, EMPTY_WORD) + padw exec.memory::get_account_storage_commitment + # => [STORAGE_COMMITMENT, EMPTY_WORD, CAPACITY] - # extract digest - exec.rpo256::squeeze_digest + exec.poseidon2::permute + exec.poseidon2::squeeze_digest # => [DIGEST] + # => [digest0, digest1, digest2, digest3] # Shape suffix to set the lower 8 bits to zero and compare the computed 
and provided ID. # --------------------------------------------------------------------------------------------- - # extract account ID from digest - drop drop swap - # => [hashed_account_id_prefix, hashed_account_id_suffix] + # extract account ID from digest where suffix = digest0 and prefix = digest1 + movup.2 drop movup.2 drop + # => [hashed_account_id_suffix, hashed_account_id_prefix] exec.memory::get_account_id movdn.3 movdn.3 - # => [hashed_account_id_prefix, hashed_account_id_suffix, account_id_prefix, account_id_suffix] + # => [hashed_account_id_suffix, hashed_account_id_prefix, account_id_suffix, account_id_prefix] # shape suffix of hashed id by setting the lower 8 bits to zero - swap exec.account_id::shape_suffix swap - # => [hashed_account_id_prefix, hashed_account_id_suffix, account_id_prefix, account_id_suffix] + exec.account_id::shape_suffix + # => [hashed_account_id_suffix, hashed_account_id_prefix, account_id_suffix, account_id_prefix] # assert the account ID matches the account ID of the new account exec.account_id::is_equal assert.err=ERR_ACCOUNT_SEED_AND_COMMITMENT_DIGEST_MISMATCH @@ -936,15 +987,15 @@ pub proc validate_storage # => [curr_slot_idx] dup exec.get_slot_id - # => [curr_slot_id_prefix, curr_slot_id_suffix, curr_slot_idx] + # => [curr_slot_id_suffix, curr_slot_id_prefix, curr_slot_idx] # we are guaranteed to not underflow because curr_slot_idx is at least 1 at the # beginning of the loop dup.2 sub.1 - # => [prev_slot_idx, curr_slot_id_prefix, curr_slot_id_suffix, curr_slot_idx] + # => [prev_slot_idx, curr_slot_id_suffix, curr_slot_id_prefix, curr_slot_idx] exec.get_slot_id - # => [prev_slot_id_prefix, prev_slot_id_suffix, curr_slot_id_prefix, curr_slot_id_suffix, curr_slot_idx] + # => [prev_slot_id_suffix, prev_slot_id_prefix, curr_slot_id_suffix, curr_slot_id_prefix, curr_slot_idx] # this effectively checks that slots are sorted _and_ unique, since duplicate slot IDs are # not less than each other @@ -969,31 +1020,28 @@ end #! #! 
This procedure is public so it can be tested. #! -#! Inputs: [prev_slot_id_prefix, prev_slot_id_suffix, curr_slot_id_prefix, curr_slot_id_suffix] +#! Inputs: [prev_slot_id_suffix, prev_slot_id_prefix, curr_slot_id_suffix, curr_slot_id_prefix] #! Outputs: [is_prev_lt_curr] pub proc is_slot_id_lt movup.2 - # => [curr_slot_id_prefix, prev_slot_id_prefix, prev_slot_id_suffix, curr_slot_id_suffix] - - # compute prev == curr for prefix - dup dup.2 eq - # => [is_prefix_eq, curr_slot_id_prefix, prev_slot_id_prefix, prev_slot_id_suffix, curr_slot_id_suffix] - - movdn.4 - # => [curr_slot_id_prefix, prev_slot_id_prefix, prev_slot_id_suffix, curr_slot_id_suffix, is_prefix_eq] + # => [curr_slot_id_suffix, prev_slot_id_suffix, prev_slot_id_prefix, curr_slot_id_prefix] - # compute prev < curr for prefix + # compute prev < curr for suffix lt - # => [is_prev_lt_curr_prefix, prev_slot_id_suffix, curr_slot_id_suffix, is_prefix_eq] + # => [is_prev_lt_curr_suffix, prev_slot_id_prefix, curr_slot_id_prefix] - swap.2 - # => [curr_slot_id_suffix, prev_slot_id_suffix, is_prev_lt_curr_prefix, is_prefix_eq] + movdn.2 + # => [prev_slot_id_prefix, curr_slot_id_prefix, is_prev_lt_curr_suffix] - # compute prev < curr for suffix + dup dup.2 + # => [curr_slot_id_prefix, prev_slot_id_prefix, prev_slot_id_prefix, curr_slot_id_prefix, is_prev_lt_curr_suffix] + + # compute prev < curr for prefix lt - # => [is_prev_lt_curr_suffix, is_prev_lt_curr_prefix, is_prefix_eq] + # => [is_prev_lt_curr_prefix, prev_slot_id_prefix, curr_slot_id_prefix, is_prev_lt_curr_suffix] - movup.2 + # compute prev == curr for prefix + movdn.3 eq # => [is_prefix_eq, is_prev_lt_curr_suffix, is_prev_lt_curr_prefix] # compute result as is_prefix_lt || (is_suffix_lt && is_prefix_eq) @@ -1009,10 +1057,10 @@ end #! Loads account data from the advice inputs into the _active_ account's memory section. #! #! Inputs: -#! Operand stack: [account_id_prefix, account_id_suffix] +#! Operand stack: [account_id_suffix, account_id_prefix] #! 
Advice map: { -#! ACCOUNT_ID: [[account_id_suffix, account_id_prefix, 0, account_nonce], -#! VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT], +#! ACCOUNT_ID_KEY: [[account_id_suffix, account_id_prefix, 0, account_nonce], +#! VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT], #! STORAGE_COMMITMENT: [[STORAGE_SLOT_DATA]], #! CODE_COMMITMENT: [[ACCOUNT_PROCEDURE_DATA]], #! } @@ -1020,12 +1068,8 @@ end #! Operand stack: [] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the account. -#! - ACCOUNT_ID is the word constructed from the account_id as follows: -#! [0, 0, account_id_prefix, account_id_suffix]. Notice that the actual advice map -#! key is reversed: [account_id_suffix, account_id_prefix, 0, 0]. That is the specificity of the -#! `adv.push_mapval` instruction which takes the top stack word in the big-endian (reversed) -#! order. +#! - account_id_{suffix,prefix} are the suffix and prefix felts of the ID of the account. +#! - ACCOUNT_ID_KEY is the map key constructed from the account ID as done by create_id_key. #! - account_nonce is the nonce of the account. #! - VAULT_ROOT is the commitment of the account's vault. #! - STORAGE_COMMITMENT is the commitment to the account's storage. @@ -1039,26 +1083,40 @@ end #! - the computed account code commitment does not match the provided account code commitment. #! - the number of account storage slots exceeded the maximum limit of 255. #! - the computed account storage commitment does not match the provided account storage commitment. +#! - the foreign account ID obtained from the advice map doesn't match the ID provided to the +#! procedure. 
pub proc load_foreign_account emit.ACCOUNT_BEFORE_FOREIGN_LOAD_EVENT - # => [account_id_prefix, account_id_suffix] + # => [account_id_suffix, account_id_prefix] # construct the advice map key from the account ID to load the core account data - exec.create_id_key - # OS => [ACCOUNT_ID_KEY] + dup.1 dup.1 exec.create_id_key + # OS => [ACCOUNT_ID_KEY, account_id_suffix, account_id_prefix] # move the core account data to the advice stack adv.push_mapval - # OS => [ACCOUNT_ID_KEY] + # OS => [ACCOUNT_ID_KEY, account_id_suffix, account_id_prefix] # AS => [ACCOUNT_ID_AND_NONCE, VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT] - # store the id and nonce of the foreign account to memory + # load the id and nonce of the foreign account from the advice stack overwriting the ID key adv_loadw - # OS => [account_id_prefix, account_id_suffix, 0, account_nonce] + # OS => [account_nonce, 0, adv_account_id_suffix, adv_account_id_prefix, account_id_suffix, account_id_prefix] # AS => [VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT] + movup.5 movup.5 + # OS => [account_id_suffix, account_id_prefix, account_nonce, 0, adv_account_id_suffix, adv_account_id_prefix] + # AS => [VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT] + + # check that the account ID provided by the advice map matches the one provided by the operand + # stack + dup.5 dup.5 + exec.account_id::is_equal assert.err=ERR_FOREIGN_ACCOUNT_ID_MISMATCH + # OS => [account_nonce, 0, account_id_suffix, account_id_prefix] + # AS => [VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT] + + # store the id and nonce of the foreign account to memory exec.memory::set_account_id_and_nonce - # OS => [account_nonce, 0, account_id_prefix, account_id_suffix] + # OS => [account_nonce, 0, account_id_suffix, account_id_prefix] # AS => [VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT] # store the vault root of the foreign account to the memory @@ -1139,18 +1197,18 @@ pub proc save_account_storage_data # OS => [acct_storage_slots_ptr, end_ptr, 
STORAGE_COMMITMENT] # AS => [[STORAGE_SLOT_DATA]] - # pad stack before reading from advice stack - padw padw padw - # OS => [PAD, PAD, PAD, acct_storage_slots_ptr, end_ptr, STORAGE_COMMITMENT] + # initialize hasher state before reading from advice stack + exec.poseidon2::init_no_padding + # OS => [RATE0, RATE1, CAPACITY, acct_storage_slots_ptr, end_ptr, STORAGE_COMMITMENT] # AS => [[STORAGE_SLOT_DATA]] # read the data from advice stack to memory and hash exec.mem::pipe_double_words_to_memory - # OS => [PERM, PERM, PERM, end_ptr', STORAGE_COMMITMENT] + # OS => [RATE0, RATE1, CAPACITY, end_ptr', STORAGE_COMMITMENT] # AS => [] # extract the digest - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # OS => [DIGEST, end_ptr', STORAGE_COMMITMENT] # drop end_ptr @@ -1220,11 +1278,11 @@ pub proc save_account_procedure_data # read the data from advice stack to memory and hash exec.mem::pipe_words_to_memory - # OS => [PERM, PERM, PERM, end_ptr', CODE_COMMITMENT] + # OS => [RATE0, RATE1, CAPACITY, end_ptr', CODE_COMMITMENT] # AS => [] # extract the digest - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # OS => [DIGEST, end_ptr', CODE_COMMITMENT] # drop end_ptr @@ -1256,7 +1314,7 @@ pub proc insert_new_storage sub.1 # => [slot_idx] - dup exec.get_storage_slot_type + dup exec.get_native_storage_slot_type # => [slot_type, slot_idx] push.STORAGE_SLOT_TYPE_MAP eq @@ -1342,7 +1400,7 @@ proc insert_and_validate_storage_map # => [remaining_entries, slot_ptr, MAP_ROOT] # push a key-value pair (8 felts) to the operand stack - adv_push.8 + padw adv_loadw padw adv_loadw # => [KEY, VALUE, remaining_entries, slot_ptr, MAP_ROOT] dup.9 @@ -1381,11 +1439,11 @@ end #! WARNING: The index must be in bounds. #! #! Inputs: [index] -#! Outputs: [INITIAL_VALUE, CURRENT_VALUE, slot_id_prefix, slot_id_suffix] +#! Outputs: [INITIAL_VALUE, CURRENT_VALUE, slot_id_suffix, slot_id_prefix] #! #! Where: #! - index is the index of the slot. -#! 
- slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - INITIAL_VALUE is the initial value of the item at the beginning of the transaction. #! - CURRENT_VALUE is the current value of the item. @@ -1402,25 +1460,25 @@ pub proc get_item_delta # => [slot_ptr, slot_ptr, offset] # load the slot ID - add.ACCOUNT_SLOT_ID_SUFFIX_OFFSET mem_load - # => [slot_id_suffix, slot_ptr, offset] + add.ACCOUNT_SLOT_ID_PREFIX_OFFSET mem_load + # => [slot_id_prefix, slot_ptr, offset] - dup.1 add.ACCOUNT_SLOT_ID_PREFIX_OFFSET mem_load - # => [slot_id_prefix, slot_id_suffix, slot_ptr, offset] + dup.1 add.ACCOUNT_SLOT_ID_SUFFIX_OFFSET mem_load + # => [slot_id_suffix, slot_id_prefix, slot_ptr, offset] # load the current value movup.2 exec.get_item_raw - # => [CURRENT_VALUE, slot_id_prefix, slot_id_suffix, offset] + # => [CURRENT_VALUE, slot_id_suffix, slot_id_prefix, offset] # get account initial storage slots section offset exec.memory::get_account_initial_storage_slots_ptr - # => [init_storage_slots_ptr, CURRENT_VALUE, slot_id_prefix, slot_id_suffix, offset] + # => [init_storage_slots_ptr, CURRENT_VALUE, slot_id_suffix, slot_id_prefix, offset] movup.7 add - # => [init_slot_ptr, CURRENT_VALUE, slot_id_prefix, slot_id_suffix] + # => [init_slot_ptr, CURRENT_VALUE, slot_id_suffix, slot_id_prefix] exec.get_item_raw - # => [INITIAL_VALUE, CURRENT_VALUE, slot_id_prefix, slot_id_suffix] + # => [INITIAL_VALUE, CURRENT_VALUE, slot_id_suffix, slot_id_prefix] end #! Gets the slot ID of the storage slot at the provided index. @@ -1428,11 +1486,11 @@ end #! WARNING: The index must be in bounds. #! #! Inputs: [index] -#! Outputs: [slot_id_prefix, slot_id_suffix] +#! Outputs: [slot_id_suffix, slot_id_prefix] #! #! Where: #! - index is the index of the slot. -#! 
- slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. pub proc get_slot_id # convert the index into a memory offset @@ -1443,11 +1501,11 @@ pub proc get_slot_id add # => [slot_ptr] - dup add.ACCOUNT_SLOT_ID_SUFFIX_OFFSET mem_load - # => [slot_id_suffix, slot_ptr] + dup add.ACCOUNT_SLOT_ID_PREFIX_OFFSET mem_load + # => [slot_id_prefix, slot_ptr] - swap add.ACCOUNT_SLOT_ID_PREFIX_OFFSET mem_load - # => [slot_id_prefix, slot_id_suffix] + swap add.ACCOUNT_SLOT_ID_SUFFIX_OFFSET mem_load + # => [slot_id_suffix, slot_id_prefix] end #! Sets the value of the storage slot located at the memory address specified by the provided @@ -1464,7 +1522,7 @@ end #! - slot_ptr is the pointer to a slot. #! - VALUE is the new value of the item. proc set_item_raw - add.ACCOUNT_SLOT_VALUE_OFFSET mem_storew_be dropw + add.ACCOUNT_SLOT_VALUE_OFFSET mem_storew_le dropw # => [] # set the storage commitment dirty flag to indicate that the commitment is outdated @@ -1521,7 +1579,7 @@ proc set_map_item_raw # => [OLD_VALUE, NEW_ROOT, KEY, NEW_VALUE] # store OLD_VALUE until the end of the procedure - loc_storew_be.4 swapw + loc_storew_le.4 swapw # => [NEW_ROOT, OLD_VALUE, KEY, NEW_VALUE] # store NEW_ROOT into the map slot's VALUE @@ -1546,7 +1604,7 @@ proc set_map_item_raw # => [] # load OLD_VALUE as return value on the stack - padw loc_loadw_be.4 + padw loc_loadw_le.4 # => [OLD_VALUE] end @@ -1567,19 +1625,19 @@ proc get_item_raw # => [slot_value_ptr] # load the item from memory - padw movup.4 mem_loadw_be + padw movup.4 mem_loadw_le # => [VALUE] end #! Finds the storage map root in the storage slot with the provided name in the provided storage #! slots section and returns the VALUE associated with the KEY in the corresponding map. #! -#! Inputs: [storage_slots_ptr, slot_id_prefix, slot_id_suffix, KEY] +#! 
Inputs: [storage_slots_ptr, slot_id_suffix, slot_id_prefix, KEY] #! Outputs: [VALUE] #! #! Where: #! - KEY is the key to look up in the map. -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the map item at KEY. #! @@ -1587,7 +1645,7 @@ end #! - a slot with the provided slot ID does not exist in account storage. #! - the requested storage slot type is not map. proc get_map_item_raw - exec.find_storage_slot + exec.get_storage_slot_ptr # => [slot_ptr, KEY] emit.ACCOUNT_STORAGE_BEFORE_GET_MAP_ITEM_EVENT @@ -1619,37 +1677,53 @@ proc get_map_item_raw # => [VALUE] end -#! Finds the slot identified by the key [slot_id_prefix, slot_id_suffix, 0, 0] (stack order) and -#! returns the pointer to that slot. +#! Finds the slot identified by the key [_, _, slot_id_suffix, slot_id_prefix] and returns a flag +#! indicating whether the slot was found and the pointer to that slot. #! -#! Inputs: [storage_slots_ptr, slot_id_prefix, slot_id_suffix] -#! Outputs: [slot_ptr] +#! Inputs: [storage_slots_ptr, slot_id_suffix, slot_id_prefix] +#! Outputs: [is_found, slot_ptr] #! #! Where: #! - storage_slots_ptr is the pointer to the storage slots section. +#! - is_found is 1 if the slot was found, 0 otherwise. #! - slot_ptr is the pointer to the resolved storage slot. -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. -#! -#! Panics if: -#! - a slot with the provided slot ID does not exist in account storage. 
proc find_storage_slot # construct the start and end pointers of the storage slot section in which we will search dup exec.memory::get_num_storage_slots mul.ACCOUNT_STORAGE_SLOT_DATA_LENGTH add - # => [storage_slots_end_ptr, storage_slots_start_ptr, slot_id_prefix, slot_id_suffix] + # => [storage_slots_end_ptr, storage_slots_start_ptr, slot_id_suffix, slot_id_prefix] - movdn.3 movdn.2 - # => [slot_id_prefix, slot_id_suffix, storage_slots_start_ptr, storage_slots_end_ptr] + swap movup.3 movup.3 + # => [slot_id_suffix, slot_id_prefix, storage_slots_start_ptr, storage_slots_end_ptr] - # find the slot whose slot key matches [slot_id_prefix, slot_id_suffix, 0, 0] # if the slot key does not exist, this procedure will validate its absence exec.sorted_array::find_half_key_value # => [is_slot_found, slot_ptr, storage_slots_start_ptr, storage_slots_end_ptr] - assert.err=ERR_ACCOUNT_UNKNOWN_STORAGE_SLOT_NAME - # => [slot_ptr, storage_slots_start_ptr, storage_slots_end_ptr] + movup.2 drop movup.2 drop + # => [is_slot_found, slot_ptr] +end - swap.2 drop drop +#! Finds the slot identified by the key [_, _, slot_id_suffix, slot_id_prefix] and returns the +#! pointer to that slot. +#! +#! Inputs: [storage_slots_ptr, slot_id_suffix, slot_id_prefix] +#! Outputs: [slot_ptr] +#! +#! Where: +#! - storage_slots_ptr is the pointer to the storage slots section. +#! - slot_ptr is the pointer to the resolved storage slot. +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are +#! the first two felts of the hashed slot name. +#! +#! Panics if: +#! - a slot with the provided slot ID does not exist in account storage. +proc get_storage_slot_ptr + exec.find_storage_slot + # => [is_found, slot_ptr] + + assert.err=ERR_ACCOUNT_UNKNOWN_STORAGE_SLOT_NAME # => [slot_ptr] end @@ -1696,18 +1770,18 @@ proc get_procedure_root # => [proc_ptr] # load procedure root from memory - padw movup.4 mem_loadw_be + padw movup.4 mem_loadw_le # => [PROC_ROOT] end #! 
Returns the pointer to the next vacant memory slot if the account was not loaded before, and the #! pointer to the account data otherwise. #! -#! Inputs: [foreign_account_id_prefix, foreign_account_id_suffix] -#! Outputs: [was_loaded, ptr, foreign_account_id_prefix, foreign_account_id_suffix] +#! Inputs: [foreign_account_id_suffix, foreign_account_id_prefix] +#! Outputs: [was_loaded, ptr, foreign_account_id_suffix, foreign_account_id_prefix] #! #! Where: -#! - foreign_account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the foreign +#! - foreign_account_id_{suffix,prefix} are the suffix and prefix felts of the ID of the foreign #! account whose procedure is going to be executed. #! - was_loaded is the binary flag indicating whether the foreign account was already loaded to the #! memory. @@ -1720,11 +1794,11 @@ pub proc get_account_data_ptr # move pointer one account block back so that the first account pointer in the cycle will point # to the native account exec.memory::get_native_account_data_ptr exec.memory::get_account_data_length sub - # => [curr_account_ptr, foreign_account_id_prefix, foreign_account_id_suffix] + # => [curr_account_ptr, foreign_account_id_suffix, foreign_account_id_prefix] # push the pad element onto the stack: it will represent the `is_equal_id` flag during the cycle push.0 movdn.3 - # => [curr_account_ptr, foreign_account_id_prefix, foreign_account_id_suffix, is_equal_id=0] + # => [curr_account_ptr, foreign_account_id_suffix, foreign_account_id_prefix, is_equal_id=0] # push the flag to enter the loop push.1 @@ -1733,40 +1807,40 @@ pub proc get_account_data_ptr # drop the flag left from the previous loop # in the first iteration this will be a pad element movup.3 drop - # => [curr_account_ptr, foreign_account_id_prefix, foreign_account_id_suffix] + # => [curr_account_ptr, foreign_account_id_suffix, foreign_account_id_prefix] # move the current account pointer to the next account data block 
exec.memory::get_account_data_length add - # => [curr_account_ptr', foreign_account_id_prefix, foreign_account_id_suffix] + # => [curr_account_ptr', foreign_account_id_suffix, foreign_account_id_prefix] - dup add.ACCT_ID_SUFFIX_OFFSET mem_load - dup.1 add.ACCT_ID_PREFIX_OFFSET mem_load - # => [account_id_prefix, account_id_suffix, curr_account_ptr', foreign_account_id_prefix, foreign_account_id_suffix] + dup add.ACCT_ID_PREFIX_OFFSET mem_load + dup.1 add.ACCT_ID_SUFFIX_OFFSET mem_load + # => [account_id_suffix, account_id_prefix, curr_account_ptr', foreign_account_id_suffix, foreign_account_id_prefix] # check whether the ID is equal to zero, if so it means this memory block was not yet initialized dup.1 dup.1 push.0 push.0 exec.account_id::is_equal - # => [is_empty_block, account_id_prefix, account_id_suffix, curr_account_ptr', foreign_account_id_prefix, foreign_account_id_suffix] + # => [is_empty_block, account_id_suffix, account_id_prefix, curr_account_ptr', foreign_account_id_suffix, foreign_account_id_prefix] # check whether the current id matches the foreign id movdn.2 dup.5 dup.5 exec.account_id::is_equal - # => [is_equal_id, is_empty_word, curr_account_ptr', foreign_account_id_prefix, foreign_account_id_suffix] + # => [is_equal_id, is_empty_block, curr_account_ptr', foreign_account_id_suffix, foreign_account_id_prefix] # get the loop flag # it equals 1 if both `is_equal_id` and `is_empty_block` flags are equal to 0, so we should # continue iterating dup movdn.5 or not - # => [loop_flag, curr_account_ptr', foreign_account_id_prefix, foreign_account_id_suffix, is_equal_id] + # => [loop_flag, curr_account_ptr', foreign_account_id_suffix, foreign_account_id_prefix, is_equal_id] end # check that the loading of one more account won't exceed the maximum number of the foreign # accounts which can be loaded. 
dup exec.memory::get_max_foreign_account_ptr lte assert.err=ERR_FOREIGN_ACCOUNT_MAX_NUMBER_EXCEEDED - # => [curr_account_ptr, foreign_account_id_prefix, foreign_account_id_suffix, is_equal_id] + # => [curr_account_ptr, foreign_account_id_suffix, foreign_account_id_prefix, is_equal_id] # the resulting `was_loaded` flag is essentially equal to the `is_equal_id` flag movup.3 - # => [was_loaded, curr_account_ptr, foreign_account_id_prefix, foreign_account_id_suffix] + # => [was_loaded, curr_account_ptr, foreign_account_id_suffix, foreign_account_id_prefix] end #! Checks that the state of the active foreign account is valid. @@ -1811,7 +1885,7 @@ end #! Inputs: [KEY] #! Outputs: [HASHED_KEY] proc hash_map_key - exec.rpo256::hash + exec.poseidon2::hash # => [HASHED_KEY] end @@ -1845,15 +1919,15 @@ proc refresh_storage_commitment # => [start_ptr, end_ptr] # pad stack to read and hash from memory - padw padw padw - # => [PAD, PAD, PAD, start_ptr, end_ptr] + exec.poseidon2::init_no_padding + # => [RATE0, RATE1, CAPACITY, start_ptr, end_ptr] # hash elements from memory - exec.rpo256::absorb_double_words_from_memory - # => [PERM, PERM, PERM, start_ptr, end_ptr] + exec.poseidon2::absorb_double_words_from_memory + # => [RATE0, RATE1, CAPACITY, start_ptr, end_ptr] # extract the digest - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # => [DIGEST, end_ptr, end_ptr] # clean stack @@ -1961,7 +2035,7 @@ pub proc has_procedure # => [PROC_ROOT, curr_proc_ptr, end_ptr, is_procedure_available] # load the root of the current procedure - padw dup.8 mem_loadw_be + padw dup.8 mem_loadw_le # => [CURR_PROC_ROOT, PROC_ROOT, curr_proc_ptr, end_ptr, is_procedure_available] # check whether the current root is equal to the provided root @@ -1994,17 +2068,64 @@ pub proc has_procedure # => [is_procedure_available'] end -#! 
Returns the key build from the provided account ID for use in the advice map or the account +# CALLBACKS +# ------------------------------------------------------------------------------------------------- + +#! Returns whether the active account defines callbacks. +#! +#! Inputs: [] +#! Outputs: [has_callbacks] +#! +#! Where: +#! - has_callbacks is 1 if the account defines callbacks, 0 otherwise. +pub proc has_callbacks + # check if the on_before_asset_added_to_account callback slot exists and is non-empty + push.ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_PROC_ROOT_SLOT[0..2] + exec.has_non_empty_slot + # => [has_account_callback] + + # check if the on_before_asset_added_to_note callback slot exists and is non-empty + push.ON_BEFORE_ASSET_ADDED_TO_NOTE_PROC_ROOT_SLOT[0..2] + exec.has_non_empty_slot + # => [has_note_callback, has_account_callback] + + or + # => [has_callbacks] +end + +#! Checks whether a storage slot with the given slot ID exists in the active account's storage +#! and has a non-empty value. +#! +#! Inputs: [slot_id_suffix, slot_id_prefix] +#! Outputs: [has_non_empty_value] +#! +#! Where: +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are +#! the first two felts of the hashed slot name. +#! - has_non_empty_value is 1 if the slot exists and its value is non-empty, 0 otherwise. +proc has_non_empty_slot + exec.find_item + # => [is_found, VALUE] + + # check if is_found && value is non-empty + movdn.4 exec.word::eqz not + # => [is_non_empty_value, is_found] + + and + # => [has_non_empty_value] +end + +#! Returns the key built from the provided account ID for use in the advice map or the account #! tree. #! -#! Inputs: [account_id_prefix, account_id_suffix] +#! Inputs: [account_id_suffix, account_id_prefix] #! Outputs: [ACCOUNT_ID_KEY] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of the account ID. +#! - account_id_{suffix,prefix} are the suffix and prefix felts of the account ID. #! 
- ACCOUNT_ID_KEY is the key word built from the provided account ID. proc create_id_key - push.0 movdn.2 push.0 movdn.2 - # => [account_id_prefix, account_id_suffix, 0, 0] + push.0.0 + # => [0, 0, account_id_suffix, account_id_prefix] # => [ACCOUNT_ID_KEY] end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/account_delta.masm b/crates/miden-protocol/asm/kernels/transaction/lib/account_delta.masm index 969438b4f5..b2b44e81c3 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/account_delta.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/account_delta.masm @@ -1,11 +1,13 @@ use $kernel::account use $kernel::asset -use $kernel::asset_vault use $kernel::constants::STORAGE_SLOT_TYPE_VALUE +use $kernel::fungible_asset use $kernel::link_map use $kernel::memory +use $kernel::memory::ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR +use $kernel::memory::ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR use $kernel::util::asset::FUNGIBLE_ASSET_MAX_AMOUNT -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 use miden::core::word # ERRORS @@ -45,39 +47,35 @@ const FELT_MAX = 0xffffffff00000000 #! Panics if: #! - the vault or storage delta is not empty but the nonce increment is zero. 
pub proc compute_commitment - # pad capacity element of the hasher - padw - # => [CAPACITY] - - exec.was_nonce_incremented push.0 - # => [0, was_nonce_incremented, CAPACITY] - # note that the delta of the nonce is equal to was_nonce_incremented - # => [0, nonce_delta, CAPACITY] + # pad capacity and RATE1 of the hasher with empty words + padw padw + # => [EMPTY_WORD, CAPACITY] exec.memory::get_native_account_id - # => [native_acct_id_prefix, native_acct_id_suffix, 0, nonce_delta, CAPACITY] - # => [ID_AND_NONCE, CAPACITY] + # => [native_acct_id_suffix, native_acct_id_prefix, EMPTY_WORD, CAPACITY] - padw - # => [EMPTY_WORD, ID_AND_NONCE, CAPACITY] + # the delta of the nonce is equal to was_nonce_incremented + push.0 exec.was_nonce_incremented + # => [nonce_delta, 0, native_acct_id_suffix, native_acct_id_prefix, EMPTY_WORD, CAPACITY] + # => [ID_AND_NONCE, EMPTY_WORD, CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, PERM] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] - # save the ID and nonce digest (the 2nd rate word) for a later check - dupw.1 movdnw.3 - # => [RATE, RATE, PERM, ID_AND_NONCE_DIGEST] + # save the ID and nonce digest (RATE0 word) for a later check + exec.poseidon2::copy_digest movdnw.3 + # => [RATE0, RATE1, CAPACITY, ID_AND_NONCE_DIGEST] exec.update_fungible_asset_delta - # => [RATE, RATE, PERM, ID_AND_NONCE_DIGEST] + # => [RATE0, RATE1, CAPACITY, ID_AND_NONCE_DIGEST] exec.update_non_fungible_asset_delta - # => [RATE, RATE, PERM, ID_AND_NONCE_DIGEST] + # => [RATE0, RATE1, CAPACITY, ID_AND_NONCE_DIGEST] exec.update_storage_delta - # => [RATE, RATE, PERM, ID_AND_NONCE_DIGEST] + # => [RATE0, RATE1, CAPACITY, ID_AND_NONCE_DIGEST] - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # => [DELTA_COMMITMENT, ID_AND_NONCE_DIGEST] exec.was_nonce_incremented not @@ -102,118 +100,114 @@ end #! Updates the given delta hasher with the storage slots. #! -#! Inputs: [RATE, RATE, PERM] -#! Outputs: [RATE, RATE, PERM] +#! 
Inputs: [RATE0, RATE1, CAPACITY] +#! Outputs: [RATE0, RATE1, CAPACITY] proc update_storage_delta exec.memory::get_num_storage_slots movdn.12 - # => [RATE, RATE, PERM, num_storage_slots] + # => [RATE0, RATE1, CAPACITY, num_storage_slots] push.0 movdn.12 - # => [RATE, RATE, PERM, slot_idx = 0, num_storage_slots] + # => [RATE0, RATE1, CAPACITY, slot_idx = 0, num_storage_slots] # loop if num_storage_slots != 0 dup.13 neq.0 - # => [should_loop, RATE, RATE, PERM, slot_idx, num_storage_slots] + # => [should_loop, RATE0, RATE1, CAPACITY, slot_idx, num_storage_slots] while.true dup.12 - # => [slot_idx, RATE, RATE, PERM, slot_idx, num_storage_slots] + # => [slot_idx, RATE0, RATE1, CAPACITY, slot_idx, num_storage_slots] exec.update_slot_delta - # => [RATE, RATE, PERM, slot_idx, num_storage_slots] + # => [RATE0, RATE1, CAPACITY, slot_idx, num_storage_slots] # increment slot index movup.12 add.1 - # => [next_slot_idx, RATE, RATE, PERM, num_storage_slots] + # => [next_slot_idx, RATE0, RATE1, CAPACITY, num_storage_slots] dup movdn.13 - # => [next_slot_idx, RATE, RATE, PERM, next_slot_idx, num_storage_slots] + # => [next_slot_idx, RATE0, RATE1, CAPACITY, next_slot_idx, num_storage_slots] # continue if next_slot_idx != num_storage_slots # we use neq instead of lt for efficiency dup.14 neq - # => [should_loop, RATE, RATE, PERM, next_slot_idx, num_storage_slots] + # => [should_loop, RATE0, RATE1, CAPACITY, next_slot_idx, num_storage_slots] end - # => [RATE, RATE, PERM, next_slot_idx, num_storage_slots] + # => [RATE0, RATE1, CAPACITY, next_slot_idx, num_storage_slots] # clean the stack movup.12 drop movup.12 drop - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end #! Updates the given delta hasher with the storage slot at the provided index. #! -#! Inputs: [slot_idx, RATE, RATE, PERM] -#! Outputs: [RATE, RATE, PERM] +#! Inputs: [slot_idx, RATE0, RATE1, CAPACITY] +#! 
Outputs: [RATE0, RATE1, CAPACITY] proc update_slot_delta - dup exec.account::get_storage_slot_type - # => [storage_slot_type, slot_idx, RATE, RATE, PERM] + dup exec.account::get_native_storage_slot_type + # => [storage_slot_type, slot_idx, RATE0, RATE1, CAPACITY] # check if slot is of type value push.STORAGE_SLOT_TYPE_VALUE eq - # => [is_value_slot_type, slot_idx, RATE, RATE, PERM] + # => [is_value_slot_type, slot_idx, RATE0, RATE1, CAPACITY] if.true exec.update_value_slot_delta else exec.update_map_slot_delta end - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end #! Updates the given delta hasher with the value storage slot at the provided index. #! -#! Inputs: [slot_idx, RATE, RATE, PERM] -#! Outputs: [RATE, RATE, PERM] +#! Inputs: [slot_idx, RATE0, RATE1, CAPACITY] +#! Outputs: [RATE0, RATE1, CAPACITY] proc update_value_slot_delta exec.account::get_item_delta - # => [INIT_VALUE, CURRENT_VALUE, slot_id_prefix, slot_id_suffix, RATE, RATE, PERM] + # => [INIT_VALUE, CURRENT_VALUE, slot_id_suffix, slot_id_prefix, RATE0, RATE1, CAPACITY] exec.word::test_eq not - # => [was_changed, INIT_VALUE, CURRENT_VALUE, slot_id_prefix, slot_id_suffix, RATE, RATE, PERM] + # => [was_changed, INIT_VALUE, CURRENT_VALUE, slot_id_suffix, slot_id_prefix, RATE0, RATE1, CAPACITY] # set was_changed to true if the account is new # generally, the delta for a new account must include all its storage slots, regardless of the # initial value and even if it is an empty word, because the initial delta for an account must # represent its full state exec.memory::is_new_account or - # => [was_changed, INIT_VALUE, CURRENT_VALUE, slot_id_prefix, slot_id_suffix, RATE, RATE, PERM] + # => [was_changed, INIT_VALUE, CURRENT_VALUE, slot_id_suffix, slot_id_prefix, RATE0, RATE1, CAPACITY] # only include in delta if the slot's value has changed or the account is new if.true # drop init value dropw - # => [CURRENT_VALUE, slot_id_prefix, slot_id_suffix, RATE, RATE, PERM] + # => [CURRENT_VALUE, 
slot_id_suffix, slot_id_prefix, RATE0, RATE1, CAPACITY] # build value slot metadata - push.DOMAIN_VALUE push.0 - # => [0, domain, CURRENT_VALUE, slot_id_prefix, slot_id_suffix, RATE, RATE, PERM] + movup.5 movup.5 + # => [slot_id_suffix, slot_id_prefix, CURRENT_VALUE, RATE0, RATE1, CAPACITY] - movup.7 movup.7 - # => [slot_id_prefix, slot_id_suffix, 0, domain, CURRENT_VALUE, RATE, RATE, PERM] + push.0.DOMAIN_VALUE + # => [[domain, 0, slot_id_suffix, slot_id_prefix], CURRENT_VALUE, RATE0, RATE1, CAPACITY] # clear rate elements swapdw dropw dropw - # => [slot_id_prefix, slot_id_suffix, 0, domain, CURRENT_VALUE, PERM] - - # arrange rate words in correct order - swapw - # => [CURRENT_VALUE, slot_id_prefix, slot_id_suffix, 0, domain, PERM] + # => [[domain, 0, slot_id_suffix, slot_id_prefix], CURRENT_VALUE, CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, PERM] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] else # drop init value, current value and slot name dropw dropw drop drop - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end #! Updates the given delta hasher with the map storage slot at the provided index. #! -#! Inputs: [slot_idx, RATE, RATE, PERM] -#! Outputs: [RATE, RATE, PERM] +#! Inputs: [slot_idx, RATE0, RATE1, CAPACITY] +#! Outputs: [RATE0, RATE1, CAPACITY] #! #! Locals: #! 
- 0: slot_id_suffix @@ -227,19 +221,19 @@ proc update_map_slot_delta # this is necessary because this procedure can be called multiple times and the second # invocation shouldn't reuse the first invocation's value push.0 loc_store.4 - # => [slot_idx, RATE, RATE, PERM] + # => [slot_idx, RATE0, RATE1, CAPACITY] dup exec.account::get_slot_id - # => [slot_id_prefix, slot_id_suffix, slot_idx, RATE, RATE, PERM] + # => [slot_id_suffix, slot_id_prefix, slot_idx, RATE0, RATE1, CAPACITY] - loc_store.1 loc_store.0 - # => [slot_idx, RATE, RATE, PERM] + loc_store.0 loc_store.1 + # => [slot_idx, RATE0, RATE1, CAPACITY] exec.memory::get_account_delta_storage_map_ptr - # => [account_delta_storage_map_ptr, RATE, RATE, PERM] + # => [account_delta_storage_map_ptr, RATE0, RATE1, CAPACITY] exec.link_map::iter - # => [has_next, iter, RATE, RATE, PERM] + # => [has_next, iter, RATE0, RATE1, CAPACITY] # enter loop if the link map is not empty while.true @@ -263,73 +257,73 @@ proc update_map_slot_delta # if the key-value pair has actually changed, update the hasher if.true # drop the initial value - swapw dropw - # => [NEW_VALUE, KEY, RATE, RATE, PERM] + swapw dropw swapw + # => [KEY, NEW_VALUE, RATE0, RATE1, CAPACITY] # increment number of changed entries in local loc_load.4 add.1 loc_store.4 - # => [NEW_VALUE, KEY, RATE, RATE, PERM] + # => [KEY, NEW_VALUE, RATE0, RATE1, CAPACITY] # drop previous RATE elements swapdw dropw dropw - # => [NEW_VALUE, KEY, PERM] + # => [KEY, NEW_VALUE, CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, PERM] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] else # discard the key and init and new value words loaded from the map dropw dropw dropw - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] # load iter and has_next loc_load.3 - # => [iter, RATE, RATE, PERM] + # => [iter, RATE0, RATE1, CAPACITY] loc_load.2 - # => [has_next, iter, RATE, RATE, PERM] + # => [has_next, iter, RATE0, 
RATE1, CAPACITY] end # drop iter drop - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] # only include the map slot metadata if there were entries in the map that resulted in an # update to the hasher state loc_load.4 neq.0 - # => [is_num_changed_entries_non_zero, RATE, RATE, PERM] + # => [is_num_changed_entries_non_zero, RATE0, RATE1, CAPACITY] # if the account is new (nonce == 0) include the map header even if it is an empty map # in order to have the delta commit to this initial storage slot. exec.memory::is_new_account or - # => [should_include_map_header, RATE, RATE, PERM] + # => [should_include_map_header, RATE0, RATE1, CAPACITY] if.true # drop the previous RATE elements dropw dropw - # => [PERM] + # => [CAPACITY] - push.DOMAIN_MAP loc_load.4 loc_load.0 loc_load.1 padw - # => [EMPTY_WORD, [slot_id_prefix, slot_id_suffix, num_changed_entries, domain], PERM] + padw loc_load.1 loc_load.0 loc_load.4 push.DOMAIN_MAP + # => [[domain, num_changed_entries, slot_id_suffix, slot_id_prefix], EMPTY_WORD, CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, PERM] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] end - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end #! Updates the given delta hasher with the fungible asset vault delta. #! -#! Inputs: [RATE, RATE, PERM] -#! Outputs: [RATE, RATE, PERM] +#! Inputs: [RATE0, RATE1, CAPACITY] +#! Outputs: [RATE0, RATE1, CAPACITY] @locals(2) proc update_fungible_asset_delta - exec.memory::get_account_delta_fungible_asset_ptr - # => [account_delta_fungible_asset_ptr, RATE, RATE, PERM] + push.ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR + # => [account_delta_fungible_asset_ptr, RATE0, RATE1, CAPACITY] exec.link_map::iter - # => [has_next, iter, RATE, RATE, PERM] + # => [has_next, iter, RATE0, RATE1, CAPACITY] # enter loop if the link map is not empty while.true @@ -344,132 +338,130 @@ proc update_fungible_asset_delta movup.8 loc_store.1 # => [KEY, VALUE0, ...] 
# this stack state is equivalent to: - # => [[faucet_id_prefix, faucet_id_suffix, 0, 0], [delta_amount, 0, 0, 0], ...] + # => [[0, 0, faucet_id_suffix_and_metadata, faucet_id_prefix], [delta_amount, 0, 0, 0], ...] swapw - # => [[delta_amount, 0, 0, 0], [faucet_id_prefix, faucet_id_suffix, 0, 0], ...] + # => [[delta_amount, 0, 0, 0], [0, 0, faucet_id_suffix_and_metadata, faucet_id_prefix], ...] # compute the absolute value of delta amount with a flag indicating whether it's positive exec.delta_amount_absolute - # => [[is_delta_amount_positive, delta_amount_abs, 0, 0, 0], ...] + # => [is_delta_amount_positive, [delta_amount_abs, 0, 0, 0], [0, 0, faucet_id_suffix_and_metadata, faucet_id_prefix], ...] - # rename is_delta_amount_positive to was_added - swap.3 drop - # => [[delta_amount_abs, 0, was_added, 0], ...] + # define the was_added value as equivalent to is_delta_amount_positive + # this value is 1 if the amount was added and 0 if the amount was removed + swap.6 drop + # => [[delta_amount_abs, 0, 0, 0], [0, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], ...] dup neq.0 - # => [is_delta_amount_non_zero, [delta_amount_abs, 0, was_added, 0], [faucet_id_prefix, faucet_id_suffix, 0, 0], ...] + # => [is_delta_amount_non_zero, [delta_amount_abs, 0, 0, 0], [0, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], ...] # if delta amount is non-zero, update the hasher if.true - swap.7 - # => [[0, 0, was_added, 0], [faucet_id_prefix, faucet_id_suffix, 0, delta_amount_abs], ...] - - drop push.DOMAIN_ASSET - # => [[domain, 0, was_added, 0], [faucet_id_prefix, faucet_id_suffix, 0, delta_amount_abs], ...] - - swap.3 - # => [[0, 0, was_added, domain], [faucet_id_prefix, faucet_id_suffix, 0, delta_amount_abs], ...] + push.DOMAIN_ASSET swap.5 drop + # => [[delta_amount_abs, 0, 0, 0], [domain, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], ...] 
+ # swap value and metadata words swapw - # => [[faucet_id_prefix, faucet_id_suffix, 0, delta_amount_abs], [0, 0, was_added, domain], RATE, RATE, PERM] + # => [[domain, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], [delta_amount_abs, 0, 0, 0], RATE0, RATE1, CAPACITY] # drop previous RATE elements swapdw dropw dropw - # => [[faucet_id_prefix, faucet_id_suffix, 0, delta_amount_abs], [0, 0, was_added, domain], PERM] + # => [[domain, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], [delta_amount_abs, 0, 0, 0], CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, PERM] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] else # discard values loaded from map: KEY, VALUE0 dropw dropw - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] # load iter and has_next loc_load.1 - # => [iter, RATE, RATE, PERM] + # => [iter, RATE0, RATE1, CAPACITY] loc_load.0 - # => [has_next, iter, RATE, RATE, PERM] + # => [has_next, iter, RATE0, RATE1, CAPACITY] end # drop iter drop - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end #! Updates the given delta hasher with the non-fungible asset vault delta. #! -#! Inputs: [RATE, RATE, PERM] -#! Outputs: [RATE, RATE, PERM] +#! Inputs: [RATE0, RATE1, CAPACITY] +#! Outputs: [RATE0, RATE1, CAPACITY] @locals(2) proc update_non_fungible_asset_delta - exec.memory::get_account_delta_non_fungible_asset_ptr - # => [account_delta_non_fungible_asset_ptr, RATE, RATE, PERM] + push.ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR + # => [account_delta_non_fungible_asset_ptr, RATE0, RATE1, CAPACITY] exec.link_map::iter - # => [has_next, iter, RATE, RATE, PERM] + # => [has_next, iter, RATE0, RATE1, CAPACITY] # enter loop if the link map is not empty while.true - exec.link_map::next_key_value - # => [KEY, VALUE0, has_next, iter, ...] + exec.link_map::next_key_double_value + # => [KEY, VALUE0, VALUE1, has_next, iter, ...] 
# store has_next - movup.8 loc_store.0 - # => [KEY, VALUE0, iter, ...] + movup.12 loc_store.0 + # => [KEY, VALUE0, VALUE1, iter, ...] # store iter - movup.8 loc_store.1 - # => [KEY, VALUE0, ...] + movup.12 loc_store.1 + # => [KEY, VALUE0, VALUE1, ...] # this stack state is equivalent to: - # => [ASSET, [was_added, 0, 0, 0], ...] + # => [ASSET_KEY, [was_added, 0, 0, 0], ASSET_VALUE, ...] dup.4 neq.0 - # => [was_added_or_removed, ASSET, [was_added, 0, 0, 0], ...] + # => [was_added_or_removed, ASSET_KEY, [was_added, 0, 0, 0], ASSET_VALUE, ...] # if the asset was added or removed (i.e. if was_added != 0), update the hasher if.true - movup.4 - # => [was_added, ASSET, [0, 0, 0], ...] + swapw + # => [[was_added, 0, 0, 0], ASSET_KEY, ASSET_VALUE, ...] # convert was_added to a boolean # was_added is 1 if the asset was added and 0 - 1 if it was removed eq.1 - # => [was_added, ASSET, [0, 0, 0], ...] + # => [[was_added, 0, 0, 0], [asset_id_suffix, asset_id_prefix, faucet_id_suffix_and_metadata, faucet_id_prefix], ASSET_VALUE, ...] - movdn.6 - # => [ASSET, [0, 0, was_added, 0], ...] + # replace asset_id_prefix with was_added and drop the remaining word + swap.5 dropw + # => [[asset_id_suffix, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], ASSET_VALUE, ...] - push.DOMAIN_ASSET swap.8 drop - # => [ASSET, [0, 0, was_added, domain], RATE, RATE, PERM] + # replace asset_id_suffix with domain + drop push.DOMAIN_ASSET + # => [[domain, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], ASSET_VALUE, ...] 
# drop previous RATE elements swapdw dropw dropw - # => [ASSET, [0, 0, was_added, domain], PERM] + # => [[domain, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], ASSET_VALUE, CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, PERM] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] else - # discard the two key and value words loaded from the map - dropw dropw - # => [RATE, RATE, PERM] + # discard the key, value0 and value1 words loaded from the map + dropw dropw dropw + # => [RATE0, RATE1, CAPACITY] end - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] # load iter and has_next loc_load.1 - # => [iter, RATE, RATE, PERM] + # => [iter, RATE0, RATE1, CAPACITY] loc_load.0 - # => [has_next, iter, RATE, RATE, PERM] + # => [has_next, iter, RATE0, RATE1, CAPACITY] end # drop iter drop - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end # DELTA BOOKKEEPING @@ -497,21 +489,23 @@ end #! #! Assumes the asset is valid, so it should be called after asset_vault::add_asset. #! -#! Inputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] #! Outputs: [] #! #! Where: -#! - ASSET is the asset. +#! - ASSET_KEY is the vault key of the asset that is added. +#! - ASSET_VALUE is the value of the asset that is added. pub proc add_asset # check if the asset is a fungible asset - exec.asset::is_fungible_asset - # => [is_fungible_asset, ASSET] + exec.asset::is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE] if.true - exec.asset_vault::build_fungible_asset_vault_key swapw - # => [ASSET, ASSET_KEY] + swapw + # => [ASSET_VALUE, ASSET_KEY] - drop drop drop movdn.4 + exec.fungible_asset::value_into_amount + movdn.4 # => [ASSET_KEY, amount] exec.add_fungible_asset @@ -527,21 +521,23 @@ end #! Assumes the asset is valid, so it should be called after asset_vault::remove_asset #! (which would abort if the asset is invalid). #! -#! Inputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] #! Outputs: [] #! #! Where: -#! - ASSET is the asset. +#! 
- ASSET_KEY is the vault key of the asset that is removed. +#! - ASSET_VALUE is the value of the asset that is removed. pub proc remove_asset # check if the asset is a fungible asset - exec.asset::is_fungible_asset - # => [is_fungible_asset, ASSET, vault_root_ptr] + exec.asset::is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE] if.true - exec.asset_vault::build_fungible_asset_vault_key swapw - # => [ASSET, ASSET_KEY] + swapw + # => [ASSET_VALUE, ASSET_KEY] - drop drop drop movdn.4 + exec.fungible_asset::value_into_amount + movdn.4 # => [ASSET_KEY, amount] exec.remove_fungible_asset @@ -561,7 +557,7 @@ end #! - ASSET_KEY is the asset key of the fungible asset. #! - amount is the amount by which the fungible asset's amount increases. pub proc add_fungible_asset - dupw exec.memory::get_account_delta_fungible_asset_ptr + dupw push.ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR # => [fungible_delta_map_ptr, ASSET_KEY, ASSET_KEY, amount] # retrieve the current delta amount @@ -582,7 +578,7 @@ pub proc add_fungible_asset swapw padw movdnw.2 # => [ASSET_KEY, delta_amount, 0, 0, 0, EMPTY_WORD] - exec.memory::get_account_delta_fungible_asset_ptr + push.ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR # => [fungible_delta_map_ptr, ASSET_KEY, delta_amount, 0, 0, 0, EMPTY_WORD] exec.link_map::set drop @@ -598,7 +594,7 @@ end #! - ASSET_KEY is the asset key of the fungible asset. #! - amount is the amount by which the fungible asset's amount decreases. 
pub proc remove_fungible_asset - dupw exec.memory::get_account_delta_fungible_asset_ptr + dupw push.ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR # => [fungible_delta_map_ptr, ASSET_KEY, ASSET_KEY, amount] # retrieve the current delta amount @@ -619,7 +615,7 @@ pub proc remove_fungible_asset swapw padw movdnw.2 # => [ASSET_KEY, delta_amount, 0, 0, 0, EMPTY_WORD] - exec.memory::get_account_delta_fungible_asset_ptr + push.ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR # => [fungible_delta_map_ptr, ASSET_KEY, delta_amount, 0, 0, 0, EMPTY_WORD] exec.link_map::set drop @@ -628,7 +624,7 @@ end #! Adds the given non-fungible asset to the non-fungible asset vault delta. #! -#! ASSET must be a valid non-fungible asset. +#! ASSET_VALUE must be a valid non-fungible asset. #! #! If the key does not exist in the delta map, the non-fungible asset's was_added value is 0. #! When it is added to the account vault, was_added is incremented by 1; when it is removed from @@ -642,29 +638,48 @@ end #! 0 -> no change to the asset #! +1 -> asset was added #! -#! Inputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] #! Outputs: [] #! #! Where: -#! - ASSET is the non-fungible asset to be added. +#! - ASSET_KEY is the vault key of the non-fungible asset to be added. +#! - ASSET_VALUE is the value of the non-fungible asset to be added. 
pub proc add_non_fungible_asset - dupw exec.memory::get_account_delta_non_fungible_asset_ptr - # => [non_fungible_delta_map_ptr, ASSET, ASSET] + dupw push.ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR + # => [non_fungible_delta_map_ptr, ASSET_KEY, ASSET_KEY, ASSET_VALUE] # retrieve the current delta - # contains_key can be ignored because the default value is an empty word and the - # was_added value is therefore 0 + # contains_key can be ignored because the asset vault ensures each asset key is only added to + # the delta once + # if no entry exists, the default value is an empty word and so the was_added value is 0 exec.link_map::get drop - # => [was_added, 0, 0, 0, EMPTY_WORD, ASSET] + # => [was_added, 0, 0, 0, PREV_ASSET_VALUE, ASSET_KEY, ASSET_VALUE] + + dupw.3 movupw.2 + # => [PREV_ASSET_VALUE, ASSET_VALUE, was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + + # the asset vault guarantees that this procedure is only called when the asset was not yet + # _added_ to the vault, so it can either be absent or it could have been removed + # absent means PREV_ASSET_VALUE is the EMPTY_WORD + # removal means PREV_ASSET_VALUE is equal to ASSET_VALUE + # sanity check that this assumption is true + exec.word::testz movdn.8 + # => [PREV_ASSET_VALUE, ASSET_VALUE, is_empty_word, was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + + exec.word::eq or + assert.err="add: prev_asset_value must be empty or equal to asset_value for non-fungible assets" + # => [was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + + # add 1 to cancel out a previous removal (was_added = 0) or mark the asset as added (was_added = 1) add.1 - # => [was_added, 0, 0, 0, EMPTY_WORD, ASSET] + # => [was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] - movupw.2 - # => [ASSET, was_added, 0, 0, 0, EMPTY_WORD] + swapw + # => [ASSET_KEY, was_added, 0, 0, 0, ASSET_VALUE] - exec.memory::get_account_delta_non_fungible_asset_ptr - # => [non_fungible_delta_map_ptr, ASSET, was_added, 0, 0, 0, EMPTY_WORD] + push.ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR + # 
=> [non_fungible_delta_map_ptr, ASSET_KEY, was_added, 0, 0, 0, ASSET_VALUE] exec.link_map::set drop # => [] @@ -672,33 +687,52 @@ end #! Removes the given non-fungible asset from the non-fungible asset vault delta. #! -#! ASSET must be a valid non-fungible asset. +#! ASSET_VALUE must be a valid non-fungible asset. #! #! See add_non_fungible_asset for documentation. #! -#! Inputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] #! Outputs: [] #! #! Where: -#! - ASSET is the non-fungible asset to be removed. +#! - ASSET_KEY is the vault key of the non-fungible asset to be removed. +#! - ASSET_VALUE is the value of the non-fungible asset to be removed. pub proc remove_non_fungible_asset - dupw exec.memory::get_account_delta_non_fungible_asset_ptr - # => [non_fungible_delta_map_ptr, ASSET, ASSET] + dupw push.ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR + # => [non_fungible_delta_map_ptr, ASSET_KEY, ASSET_KEY, ASSET_VALUE] # retrieve the current delta - # contains_key can be ignored because the default value is an empty word and the - # was_added value is therefore 0 + # contains_key can be ignored because the asset vault ensures each asset key is only removed + # from the delta once + # if no entry exists, the default value is an empty word and so the was_added value is 0 exec.link_map::get drop - # => [was_added, 0, 0, 0, EMPTY_WORD, ASSET] + # => [was_added, 0, 0, 0, PREV_ASSET_VALUE, ASSET_KEY, ASSET_VALUE] + + dupw.3 movupw.2 + # => [PREV_ASSET_VALUE, ASSET_VALUE, was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + + # the asset vault guarantees that this procedure is only called when the asset was not yet + # _removed_ from the vault, so it can either be present or it could have been removed + # absent means PREV_ASSET_VALUE is the EMPTY_WORD + # addition means PREV_ASSET_VALUE is equal to ASSET_VALUE + # sanity check that this assumption is true + + exec.word::testz movdn.8 + # => [PREV_ASSET_VALUE, ASSET_VALUE, is_empty_word, was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + + 
exec.word::eq or + assert.err="remove: prev_asset_value must be empty or equal to asset_value for non-fungible assets" + # => [was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + # sub 1 to cancel out a previous addition (was_added = 1) or mark the asset as removed (was_added = -1) sub.1 - # => [was_added, 0, 0, 0, EMPTY_WORD, ASSET] + # => [was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] - movupw.2 - # => [ASSET, was_added, 0, 0, 0, EMPTY_WORD] + swapw + # => [ASSET_KEY, was_added, 0, 0, 0, ASSET_VALUE] - exec.memory::get_account_delta_non_fungible_asset_ptr - # => [non_fungible_delta_map_ptr, ASSET, was_added, 0, 0, 0, EMPTY_WORD] + push.ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR + # => [non_fungible_delta_map_ptr, ASSET_KEY, was_added, 0, 0, 0, ASSET_VALUE] exec.link_map::set drop # => [] @@ -730,7 +764,7 @@ pub proc set_map_item # => [KEY, PREV_VALUE, NEW_VALUE] # store KEY in local - loc_storew_be.4 + loc_storew_le.4 # => [KEY, PREV_VALUE, NEW_VALUE] loc_load.0 @@ -761,7 +795,7 @@ pub proc set_map_item # => [INITIAL_VALUE, NEW_VALUE] # load key and index from locals - padw loc_loadw_be.4 loc_load.0 + padw loc_loadw_le.4 loc_load.0 # => [account_delta_storage_map_ptr, KEY, INITIAL_VALUE, NEW_VALUE] exec.link_map::set drop diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/asset.masm b/crates/miden-protocol/asm/kernels/transaction/lib/asset.masm index d5f5d20580..b703f252f5 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/asset.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/asset.masm @@ -1,174 +1,135 @@ use $kernel::account_id +use $kernel::fungible_asset +use $kernel::non_fungible_asset +use $kernel::util::asset->util_asset -# ERRORS +# ERRORS # ================================================================================================= -const ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ONE_MUST_BE_ZERO="malformed fungible asset: `ASSET[1]` must be 0" - -const 
ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_TWO_AND_THREE_MUST_BE_FUNGIBLE_FAUCET_ID="malformed fungible asset: `ASSET[2]` and `ASSET[3]` must be a valid fungible faucet id" - -const ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ZERO_MUST_BE_WITHIN_LIMITS="malformed fungible asset: `ASSET[0]` exceeds the maximum allowed amount" - -const ERR_NON_FUNGIBLE_ASSET_FORMAT_ELEMENT_THREE_MUST_BE_FUNGIBLE_FAUCET_ID="malformed non-fungible asset: `ASSET[3]` is not a valid non-fungible faucet id" - -const ERR_NON_FUNGIBLE_ASSET_FORMAT_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO="malformed non-fungible asset: the most significant bit must be 0" - -const ERR_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN="the origin of the fungible asset is not this faucet" - -const ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN="the origin of the non-fungible asset is not this faucet" +const ERR_VAULT_ASSET_KEY_ACCOUNT_ID_MUST_BE_FAUCET="account ID in asset vault key must be either of type fungible or non-fungible faucet" # CONSTANT ACCESSORS # ================================================================================================= pub use ::$kernel::util::asset::FUNGIBLE_ASSET_MAX_AMOUNT +pub use ::$kernel::util::asset::ASSET_SIZE +pub use ::$kernel::util::asset::ASSET_VALUE_MEMORY_OFFSET +pub use ::$kernel::util::asset::key_to_faucet_id +pub use ::$kernel::util::asset::key_into_faucet_id +pub use ::$kernel::util::asset::key_to_asset_id +pub use ::$kernel::util::asset::key_into_asset_id +pub use ::$kernel::util::asset::key_to_callbacks_enabled +pub use ::$kernel::util::asset::store +pub use ::$kernel::util::asset::load # PROCEDURES # ================================================================================================= -#! Validates that a fungible asset is well formed. +#! Returns a boolean indicating whether the asset is fungible. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY] +#! Outputs: [is_fungible_asset, ASSET_KEY] #! #! Where: -#! - ASSET is the asset to validate. -#! -#! Panics if: -#! 
- the asset is not well formed. -pub proc validate_fungible_asset - # assert that ASSET[1] == ZERO - dup.2 not assert.err=ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ONE_MUST_BE_ZERO - # => [ASSET] - - # assert that the tuple (ASSET[3], ASSET[2]) forms a valid account ID - dup.1 dup.1 exec.account_id::validate - # => [ASSET] - - # assert that the prefix (ASSET[3]) of the account ID is of type fungible faucet - dup exec.account_id::is_fungible_faucet - assert.err=ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_TWO_AND_THREE_MUST_BE_FUNGIBLE_FAUCET_ID - # => [ASSET] - - # assert that the max amount (ASSET[0]) of a fungible asset is not exceeded - dup.3 lte.FUNGIBLE_ASSET_MAX_AMOUNT - assert.err=ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ZERO_MUST_BE_WITHIN_LIMITS - # => [ASSET] +#! - ASSET_KEY is the vault key of the asset to check. +#! - is_fungible_asset is a boolean indicating whether the asset is fungible. +pub proc is_fungible_asset_key + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix, faucet_id_prefix] + + dup.3 exec.account_id::is_fungible_faucet + # => [is_fungible_asset, ASSET_KEY] end -#! Returns a boolean indicating whether the asset is fungible. +#! Validates that an asset's vault key is well formed. #! -#! Inputs: [ASSET] -#! Outputs: [is_fungible_asset, ASSET] +#! Inputs: [ASSET_KEY] +#! Outputs: [ASSET_KEY] #! #! Where: -#! - ASSET is the asset to check. -#! - is_fungible_asset is a boolean indicating whether the asset is fungible. -pub proc is_fungible_asset - # check the first element, it will be: - # - zero for a fungible asset - # - non zero for a non-fungible asset - dup.2 eq.0 - # => [is_fungible_asset, ASSET] +#! - ASSET_KEY is the vault key of the asset to validate. +#! +#! Panics if: +#! - the asset key is not a valid fungible or non-fungible asset key (see +#! fungible_asset::validate_key and non_fungible_asset::validate_key). 
+pub proc validate_key + # check if the asset key is fungible + exec.is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY] + + if.true + exec.fungible_asset::validate_key + # => [ASSET_KEY] + else + exec.non_fungible_asset::validate_key + # => [ASSET_KEY] + end + # => [ASSET_KEY] end -#! Validates that a non fungible asset is well formed. +#! Validates the issuer (faucet ID) and metadata in an asset vault key. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY] +#! Outputs: [ASSET_KEY] #! #! Where: -#! - ASSET is the asset to validate. +#! - ASSET_KEY is the vault key of the asset to validate. #! #! Panics if: -#! - the asset is not well formed. -pub proc validate_non_fungible_asset - # assert that ASSET[3] is a valid account ID prefix - # hack: because we only have the prefix we add a 0 as the suffix which is always valid - push.0 dup.1 exec.account_id::validate - # => [ASSET] - - # assert that the account ID prefix ASSET[3] is of type non fungible faucet - dup exec.account_id::is_non_fungible_faucet - assert.err=ERR_NON_FUNGIBLE_ASSET_FORMAT_ELEMENT_THREE_MUST_BE_FUNGIBLE_FAUCET_ID - # => [ASSET] +#! - the asset metadata is invalid (not 0 or 1). +#! - the faucet ID in the key is not a valid account ID. +pub proc validate_issuer + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix_and_metadata, faucet_id_prefix] + + dup.3 dup.3 exec.util_asset::split_suffix_and_metadata + # => [asset_metadata, faucet_id_suffix, faucet_id_prefix, ASSET_KEY] + + exec.util_asset::validate_metadata + # => [faucet_id_suffix, faucet_id_prefix, ASSET_KEY] + + exec.account_id::validate + # => [ASSET_KEY] end #! Returns a boolean indicating whether the asset is non-fungible. #! -#! Inputs: [ASSET] -#! Outputs: [is_non_fungible_asset, ASSET] +#! Inputs: [ASSET_KEY] +#! Outputs: [is_non_fungible_asset, ASSET_KEY] #! #! Where: -#! - ASSET is the asset to check. +#! - ASSET_KEY is the vault key of the asset to check. #! 
- is_non_fungible_asset is a boolean indicating whether the asset is non-fungible. -pub proc is_non_fungible_asset - # check the first element, it will be: - # - zero for a fungible asset - # - non zero for a non-fungible asset - exec.is_fungible_asset not - # => [is_non_fungible_asset, ASSET] +pub proc is_non_fungible_asset_key + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix, faucet_id_prefix] + + dup.3 exec.account_id::is_non_fungible_faucet + # => [is_non_fungible_asset, ASSET_KEY] end #! Validates that an asset is well formed. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_KEY, ASSET_VALUE] #! #! Where: -#! - ASSET is the asset to validate. +#! - ASSET_KEY is the vault key of the asset to validate. +#! - ASSET_VALUE is the value of the asset to validate. #! #! Panics if: -#! - the asset is not well formed. -pub proc validate_asset +#! - the asset is not a valid fungible or non-fungible asset (see fungible_asset::validate and +#! non_fungible_asset::validate_key). +pub proc validate # check if the asset is fungible - exec.is_fungible_asset - # => [is_fungible_asset, ASSET] + exec.is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE] # if the asset is fungible, validate the fungible asset if.true - exec.validate_fungible_asset + exec.fungible_asset::validate + # => [ASSET_KEY, ASSET_VALUE] else # if the asset is non fungible, validate the non fungible asset - exec.validate_non_fungible_asset + exec.non_fungible_asset::validate + # => [ASSET_KEY, ASSET_VALUE] end - # => [ASSET] -end - -#! Validates that a fungible asset is associated with the provided faucet_id. -#! -#! Inputs: [faucet_id_prefix, faucet_id_suffix, ASSET] -#! Outputs: [ASSET] -#! -#! Where: -#! - faucet_id_prefix is the prefix of the faucet's account ID. -#! - ASSET is the asset to validate. 
-pub proc validate_fungible_asset_origin - # assert the origin of the asset is the faucet_id provided via the stack - dup.3 dup.3 - # => [asset_id_prefix, asset_id_suffix, faucet_id_prefix, faucet_id_suffix, ASSET] - - exec.account_id::is_equal assert.err=ERR_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN - # => [ASSET] - - # assert the fungible asset is valid - exec.validate_fungible_asset - # => [ASSET] -end - -#! Validates that a non-fungible asset is associated with the provided faucet_id. -#! -#! Inputs: [faucet_id_prefix, ASSET] -#! Outputs: [ASSET] -#! -#! Where: -#! - faucet_id_prefix is the prefix of the faucet's account ID. -#! - ASSET is the asset to validate. -pub proc validate_non_fungible_asset_origin - # assert the origin of the asset is the faucet_id prefix provided via the stack - dup.1 assert_eq.err=ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN - # => [ASSET] - - # assert the non-fungible asset is valid - exec.validate_non_fungible_asset - # => [ASSET] + # => [ASSET_KEY, ASSET_VALUE] end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/asset_vault.masm b/crates/miden-protocol/asm/kernels/transaction/lib/asset_vault.masm index 07f7d3dbb7..51444b0773 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/asset_vault.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/asset_vault.masm @@ -1,47 +1,35 @@ use miden::core::collections::smt -use miden::core::word -use $kernel::account_id use $kernel::asset -use $kernel::memory -use $kernel::util::asset::FUNGIBLE_ASSET_MAX_AMOUNT +use $kernel::fungible_asset +use $kernel::non_fungible_asset # ERRORS # ================================================================================================= -const ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED="adding the fungible asset to the vault would exceed the max amount of 9223372036854775807" - const ERR_VAULT_ADD_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID="failed to add fungible asset to the asset vault due to the initial value being invalid" 
const ERR_VAULT_NON_FUNGIBLE_ASSET_ALREADY_EXISTS="the non-fungible asset already exists in the asset vault" -const ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW="failed to remove the fungible asset from the vault since the amount of the asset in the vault is less than the amount to remove" - const ERR_VAULT_REMOVE_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID="failed to remove fungible asset from the asset vault due to the initial value being invalid" const ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND="failed to remove non-existent non-fungible asset from the vault" -# CONSTANTS -# ================================================================================================= - -# The bitmask that when applied will set the fungible bit to zero. -const INVERSE_FUNGIBLE_BITMASK_U32=0xffffffdf # last byte: 0b1101_1111 - # ACCESSORS # ================================================================================================= -#! Returns the ASSET associated with the provided asset vault key. +#! Returns the ASSET_VALUE associated with the provided asset vault key. #! #! Inputs: [ASSET_KEY, vault_root_ptr] -#! Outputs: [ASSET] +#! Outputs: [ASSET_VALUE] #! #! Where: #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. #! - ASSET_KEY is the asset vault key of the asset to fetch. -#! - ASSET is the asset from the vault, which can be the EMPTY_WORD if it isn't present. +#! - ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't present. pub proc get_asset # load the asset vault root from memory - padw movup.8 mem_loadw_be + padw movup.8 mem_loadw_le # => [ASSET_VAULT_ROOT, ASSET_KEY] swapw @@ -49,10 +37,10 @@ pub proc get_asset # lookup asset exec.smt::get swapw dropw - # => [ASSET] + # => [ASSET_VALUE] end -#! Returns the _peeked_ ASSET associated with the provided asset vault key. +#! Returns the _peeked_ asset associated with the provided asset vault key. #! #! 
WARNING: Peeked means the asset is loaded from the advice provider, which is susceptible to #! manipulation from a malicious host. Therefore this should only be used when the inclusion of the @@ -69,15 +57,15 @@ end #! that the merkle paths are present prior to calling. #! #! Inputs: [ASSET_KEY, vault_root_ptr] -#! Outputs: [ASSET] +#! Outputs: [ASSET_VALUE] #! #! Where: #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. #! - ASSET_KEY is the asset vault key of the asset to fetch. -#! - ASSET is the retrieved ASSET. +#! - ASSET_VALUE is the retrieved asset. pub proc peek_asset # load the asset vault root from memory - padw movup.8 mem_loadw_be + padw movup.8 mem_loadw_le # => [ASSET_VAULT_ROOT, ASSET_KEY] swapw @@ -86,15 +74,15 @@ pub proc peek_asset # lookup asset exec.smt::peek # OS => [ASSET_KEY, ASSET_VAULT_ROOT] - # AS => [ASSET] + # AS => [ASSET_VALUE] dropw # OS => [ASSET_VAULT_ROOT] - # AS => [ASSET] + # AS => [ASSET_VALUE] # this overwrites the vault root adv_loadw - # OS => [ASSET] + # OS => [ASSET_VALUE] # AS => [] end @@ -107,325 +95,279 @@ end #! If the amount to be added is zero and the asset does not already exist in the vault, the vault #! remains unchanged. #! -#! Inputs: [ASSET, vault_root_ptr] -#! Outputs: [ASSET'] +#! Inputs: [ASSET_KEY, ASSET_VALUE, vault_root_ptr] +#! Outputs: [ASSET_VALUE'] #! #! Where: #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. -#! - ASSET is the fungible asset to add to the vault. -#! - ASSET' is the total fungible asset in the account vault after ASSET was added to it. +#! - ASSET_KEY is the vault key of the fungible asset to add to the vault. +#! - ASSET_VALUE is the fungible asset to add to the vault. +#! - ASSET_VALUE' is the total fungible asset in the account vault after ASSET_VALUE was added to it. +#! +#! Locals: +#! - 0: vault_root_ptr #! #! Panics if: -#! - the total value of assets is greater than or equal to 2^63. +#! 
- the total value of assets is greater than or equal to FUNGIBLE_ASSET_MAX_AMOUNT. +@locals(1) pub proc add_fungible_asset - # Create the asset key from the asset. + # Get the current asset using `peek_asset`. # --------------------------------------------------------------------------------------------- - exec.build_fungible_asset_vault_key - # => [ASSET_KEY, faucet_id_prefix, faucet_id_suffix, 0, amount, vault_root_ptr] + # store the vault_root_ptr + movup.8 loc_store.0 + # => [ASSET_KEY, ASSET_VALUE] - movup.6 drop - # => [[faucet_id_prefix, faucet_id_suffix, 0, 0], faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] + dupw loc_load.0 movdn.4 + # => [ASSET_KEY, vault_root_ptr, ASSET_KEY, ASSET_VALUE] - # Get the asset vault root and read the current asset using the `push_smtpeek` decorator. - # --------------------------------------------------------------------------------------------- + exec.peek_asset + # => [CUR_VAULT_VALUE, ASSET_KEY, ASSET_VALUE] - padw dup.11 - # => [vault_root_ptr, pad(4), ASSET_KEY, faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] + # since we have peeked the value, we need to later assert that the actual value matches this + # one, so we'll keep a copy for later + # set the current asset value equal to the current vault value + swapw dupw.1 + # => [CURRENT_ASSET_VALUE, ASSET_KEY, CUR_VAULT_VALUE, ASSET_VALUE] - # the current asset may be the empty word if it does not exist and so its faucet id would be zeroes - # we therefore overwrite the faucet id with the faucet id from ASSET to account for this edge case - mem_loadw_be swapw - # => [ASSET_KEY, VAULT_ROOT, faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] + movupw.3 + # => [ASSET_VALUE, CURRENT_ASSET_VALUE, ASSET_KEY, CUR_VAULT_VALUE] - exec.smt::peek adv_loadw - # => [CUR_VAULT_VALUE, VAULT_ROOT, faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] - swapw - # => [VAULT_ROOT, CUR_VAULT_VALUE, faucet_id_prefix, faucet_id_suffix, amount, 
vault_root_ptr] - dupw.1 - # => [CUR_VAULT_VALUE, VAULT_ROOT, CUR_VAULT_VALUE, faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] - drop drop - # => [[0, cur_amount], VAULT_ROOT, CUR_VAULT_VALUE, faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] - movup.11 movup.11 - # => [[faucet_id_prefix, faucet_id_suffix, 0, cur_amount], VAULT_ROOT, CUR_VAULT_VALUE, amount, vault_root_ptr] - - # Check the new amount does not exceed the maximum allowed amount and add the two - # fungible assets together. + # Merge the assets. # --------------------------------------------------------------------------------------------- - # arrange amounts - movup.3 movup.12 dup - # => [amount, amount, cur_amount, faucet_id_prefix, faucet_id_suffix, 0, VAULT_ROOT, CUR_VAULT_VALUE, vault_root_ptr] - - # compute max_amount - cur_amount - push.FUNGIBLE_ASSET_MAX_AMOUNT dup.3 sub - # => [(max_amount - cur_amount), amount, amount, cur_amount, faucet_id_prefix, faucet_id_suffix, 0, VAULT_ROOT, - # CUR_VAULT_VALUE, vault_root_ptr] + exec.fungible_asset::merge + # => [MERGED_ASSET_VALUE, ASSET_KEY, CUR_VAULT_VALUE] - # assert amount + cur_amount < max_amount - lte assert.err=ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED - # => [amount, cur_amount, faucet_id_prefix, faucet_id_suffix, 0, VAULT_ROOT, CUR_VAULT_VALUE, vault_root_ptr] + # store a copy of MERGED_ASSET_VALUE for returning + movdnw.2 dupw.2 + # => [MERGED_ASSET_VALUE, ASSET_KEY, CUR_VAULT_VALUE, MERGED_ASSET_VALUE] - # add asset amounts - add movdn.3 - # => [ASSET', VAULT_ROOT, CUR_VAULT_VALUE, vault_root_ptr] - - # Create the asset key and insert the updated asset. + # Insert the merged asset. 
# --------------------------------------------------------------------------------------------- - # create the asset key to prepare insertion of the asset into the vault - dupw movdnw.3 - # => [ASSET', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] - dupw - # => [ASSET', ASSET', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] - push.0 swap.4 drop - # => [[faucet_id_prefix, faucet_id_suffix, 0, 0], ASSET', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] - swapw - # => [ASSET', ASSET_KEY', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] + # load the vault root + padw loc_load.0 mem_loadw_le + # => [VAULT_ROOT, MERGED_ASSET_VALUE, ASSET_KEY, CUR_VAULT_VALUE, MERGED_ASSET_VALUE] - # pad empty word for insertion - padw - # => [EMPTY_WORD, ASSET', ASSET_KEY', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] - - # check if amount of new asset is zero - # if it is zero, insert EMPTY_WORD to keep the merkle tree sparse - dup.7 eq.0 - # => [is_amount_zero, EMPTY_WORD, ASSET', ASSET_KEY', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] - - # If is_amount_zero EMPTY_WORD remains. - # If !is_amount_zero ASSET' remains. 
- cdropw - # => [EMPTY_WORD_OR_ASSET', ASSET_KEY', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] + movdnw.2 + # => [MERGED_ASSET_VALUE, ASSET_KEY, VAULT_ROOT, CUR_VAULT_VALUE, MERGED_ASSET_VALUE] - # update asset in vault and assert the old value is equivalent to the value provided via the - # decorator + # update asset in vault exec.smt::set - # => [PREV_ASSET, VAULT_ROOT', CUR_VAULT_VALUE, ASSET', vault_root_ptr] + # => [PREV_VAULT_VALUE, NEW_VAULT_ROOT, CUR_VAULT_VALUE, MERGED_ASSET_VALUE] + # assert PREV_VAULT_VALUE = CUR_VAULT_VALUE to make sure peek_asset returned the correct asset movupw.2 assert_eqw.err=ERR_VAULT_ADD_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID - # => [VAULT_ROOT', ASSET', vault_root_ptr] + # => [NEW_VAULT_ROOT, MERGED_ASSET_VALUE] # update the vault root - movup.8 mem_storew_be dropw - # => [ASSET'] + loc_load.0 mem_storew_le dropw + # => [MERGED_ASSET_VALUE] + # => [ASSET_VALUE'] end #! Add the specified non-fungible asset to the vault. #! -#! Inputs: [ASSET, vault_root_ptr] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE, vault_root_ptr] +#! Outputs: [ASSET_VALUE] #! #! Where: #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. -#! - ASSET is the non-fungible asset that is added to the vault. +#! - ASSET_KEY is the vault key of the non-fungible asset that is added to the vault. +#! - ASSET_VALUE is the non-fungible asset that is added to the vault. #! #! Panics if: #! - the vault already contains the same non-fungible asset. pub proc add_non_fungible_asset - # Build the asset key from the non-fungible asset. - # --------------------------------------------------------------------------------------------- - - dupw exec.build_non_fungible_asset_vault_key - # => [ASSET_KEY, ASSET, vault_root_ptr] - # Load VAULT_ROOT and insert asset. 
# --------------------------------------------------------------------------------------------- padw dup.12 - # => [vault_root_ptr, pad(4), ASSET_KEY, ASSET, vault_root_ptr] - mem_loadw_be swapw - # => [ASSET_KEY, VAULT_ROOT, ASSET, vault_root_ptr] + # => [vault_root_ptr, pad(4), ASSET_KEY, ASSET_VALUE, vault_root_ptr] + + mem_loadw_le swapw + # => [ASSET_KEY, VAULT_ROOT, ASSET_VALUE, vault_root_ptr] + dupw.2 - # => [ASSET, ASSET_KEY, VAULT_ROOT, ASSET, vault_root_ptr] + # => [ASSET_VALUE, ASSET_KEY, VAULT_ROOT, ASSET_VALUE, vault_root_ptr] # insert asset into vault exec.smt::set - # => [OLD_VAL, VAULT_ROOT', ASSET, vault_root_ptr] + # => [OLD_VAL, VAULT_ROOT', ASSET_VALUE, vault_root_ptr] # assert old value was empty padw assert_eqw.err=ERR_VAULT_NON_FUNGIBLE_ASSET_ALREADY_EXISTS - # => [VAULT_ROOT', ASSET, vault_root_ptr] + # => [VAULT_ROOT', ASSET_VALUE, vault_root_ptr] # update the vault root - movup.8 mem_storew_be dropw - # => [ASSET] + movup.8 mem_storew_le dropw + # => [ASSET_VALUE] end #! Add the specified asset to the vault. #! -#! Inputs: [ASSET, vault_root_ptr] -#! Outputs: [ASSET'] +#! Inputs: [ASSET_KEY, ASSET_VALUE, vault_root_ptr] +#! Outputs: [ASSET_VALUE'] #! #! Where: -#! - ASSET is the asset that is added to the vault. +#! - ASSET_KEY is the vault key of the asset that is added to the vault. +#! - ASSET_VALUE is the value of the asset that is added to the vault. #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. -#! - ASSET' final asset in the account vault defined as follows: -#! - If ASSET is a non-fungible asset, then ASSET' is the same as ASSET. -#! - If ASSET is a fungible asset, then ASSET' is the total fungible asset in the account vault -#! after ASSET was added to it. +#! - ASSET_VALUE' final asset in the account vault defined as follows: +#! - If ASSET_VALUE is a non-fungible asset, then ASSET_VALUE' is the same as ASSET_VALUE. +#! 
- If ASSET_VALUE is a fungible asset, then ASSET_VALUE' is the total fungible asset in the account vault +#! after ASSET_VALUE was added to it. #! #! Panics if: #! - the asset is not valid. -#! - the total value of two fungible assets is greater than or equal to 2^63. +#! - the total value of two fungible assets is greater than FUNGIBLE_ASSET_MAX_AMOUNT. #! - the vault already contains the same non-fungible asset. pub proc add_asset # check if the asset is a fungible asset - exec.asset::is_fungible_asset - # => [is_fungible_asset, ASSET] + exec.asset::is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE, vault_root_ptr] # add the asset to the asset vault if.true # validate the fungible asset - exec.asset::validate_fungible_asset - # => [ASSET] + exec.fungible_asset::validate + # => [ASSET_KEY, ASSET_VALUE, vault_root_ptr] exec.add_fungible_asset - # => [ASSET'] + # => [ASSET_VALUE'] else # validate the non-fungible asset - exec.asset::validate_non_fungible_asset - # => [ASSET] + exec.non_fungible_asset::validate + # => [ASSET_KEY, ASSET_VALUE, vault_root_ptr] exec.add_non_fungible_asset - # => [ASSET'] + # => [ASSET_VALUE'] end end # REMOVE ASSET # ================================================================================================= -#! Remove the specified fungible asset from the vault. +#! Splits ASSET_VALUE off the existing asset in the vault associated with the ASSET_KEY +#! and returns the remaining asset value. +#! +#! For instance, if ASSET_KEY points to a fungible asset with amount 100, and ASSET_VALUE has +#! amount 30, then a fungible asset with amount 70 remains in the vault and is returned. #! -#! Inputs: [ASSET, vault_root_ptr] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE, vault_root_ptr] +#! Outputs: [REMAINING_ASSET_VALUE] #! #! Where: -#! - ASSET is the fungible asset to remove from the vault. +#! - ASSET_KEY is the asset vault key of the fungible asset to remove from the vault. +#! 
- REMAINING_ASSET_VALUE is the value of the fungible asset remaining in the vault after removal. #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. #! #! Locals: -#! - 0..4: ASSET +#! - 0..4: REMAINING_ASSET_VALUE #! #! Panics if: #! - the amount of the asset in the vault is less than the amount to be removed. @locals(4) pub proc remove_fungible_asset - exec.build_fungible_asset_vault_key - # => [ASSET_KEY, ASSET, vault_root_ptr] - dupw movdnw.2 - # => [ASSET_KEY, ASSET, ASSET_KEY, vault_root_ptr] + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY, vault_root_ptr] dup.12 movdn.4 - # => [ASSET_KEY, vault_root_ptr, ASSET, ASSET_KEY, vault_root_ptr] + # => [ASSET_KEY, vault_root_ptr, ASSET_VALUE, ASSET_KEY, vault_root_ptr] exec.peek_asset - # => [PEEKED_ASSET, ASSET, ASSET_KEY, vault_root_ptr] + # => [PEEKED_ASSET_VALUE, ASSET_VALUE, ASSET_KEY, vault_root_ptr] movdnw.2 - # => [ASSET, ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - - # store ASSET so we can return it later - loc_storew_be.0 - # => [ASSET, ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - - dup.3 dup.12 - # => [peeked_amount, amount, ASSET, ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + # => [ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] - # assert amount <= peeked_amount - lte assert.err=ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW - # => [ASSET, ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - # => [[faucet_id_prefix, faucet_id_suffix, 0, amount], ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + dupw.2 swapw + # => [ASSET_VALUE, PEEKED_ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] - dup.11 movup.4 - # => [amount, peeked_amount, [faucet_id_prefix, faucet_id_suffix, 0], ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + # compute REMAINING_ASSET_VALUE = PEEKED_ASSET_VALUE - ASSET_VALUE + exec.fungible_asset::split + # => [REMAINING_ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] - # compute peeked_amount - amount - sub - # => [new_amount, 
[faucet_id_prefix, faucet_id_suffix, 0], ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + # store remaining asset value so we can return it later + loc_storew_le.0 + # => [REMAINING_ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] - movdn.3 - # => [[faucet_id_prefix, faucet_id_suffix, new_amount], ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - # => [ASSET', ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - - padw dup.7 - # => [new_amount, EMPTY_WORD, ASSET', ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - - eq.0 - # => [is_new_amount_zero, EMPTY_WORD, ASSET', ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - - # If is_new_amount_zero EMPTY_WORD remains. - # If !is_new_amount_zero ASSET' remains. - cdropw - # => [EMPTY_WORD_OR_ASSET', ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - - dup.12 padw movup.4 mem_loadw_be - # => [VAULT_ROOT, EMPTY_WORD_OR_ASSET', ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + dup.12 padw movup.4 mem_loadw_le + # => [VAULT_ROOT, REMAINING_ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] movdnw.2 - # => [EMPTY_WORD_OR_ASSET', ASSET_KEY, VAULT_ROOT, PEEKED_ASSET, vault_root_ptr] + # => [REMAINING_ASSET_VALUE, ASSET_KEY, VAULT_ROOT, PEEKED_ASSET_VALUE, vault_root_ptr] # update asset in vault and assert the old value is equivalent to the peeked value provided # via peek_asset exec.smt::set - # => [OLD_VALUE, NEW_VAULT_ROOT, PEEKED_ASSET, vault_root_ptr] + # => [OLD_VALUE, NEW_VAULT_ROOT, PEEKED_ASSET_VALUE, vault_root_ptr] # assert OLD_VALUE == PEEKED_ASSET movupw.2 assert_eqw.err=ERR_VAULT_REMOVE_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID # => [NEW_VAULT_ROOT, vault_root_ptr] # update vault root - movup.4 mem_storew_be + movup.4 mem_storew_le # => [NEW_VAULT_ROOT] - loc_loadw_be.0 - # => [ASSET] + loc_loadw_le.0 + # => [REMAINING_ASSET_VALUE] end -#! Remove the specified non-fungible asset from the vault. +#! Remove the specified non-fungible asset from the vault and return the remaining asset value. #! -#! Inputs: [ASSET, vault_root_ptr] -#! 
Outputs: [ASSET] +#! Since non-fungible assets are either fully present or absent, the remaining value after +#! removal is always EMPTY_WORD. +#! +#! Note that the ASSET_VALUE is only needed to check against the asset that was removed from the +#! vault. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE, vault_root_ptr] +#! Outputs: [REMAINING_ASSET_VALUE] #! #! Where: -#! - ASSET is the non-fungible asset to remove from the vault. +#! - ASSET_KEY is the asset vault key of the non-fungible asset to remove from the vault. +#! - REMAINING_ASSET_VALUE is always EMPTY_WORD (nothing remains after removing a non-fungible asset). #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. #! #! Panics if: #! - the non-fungible asset is not found in the vault. pub proc remove_non_fungible_asset - # build non-fungible asset key - dupw exec.build_non_fungible_asset_vault_key padw - # => [pad(4), ASSET_KEY, ASSET, vault_root_ptr] - # load vault root - dup.12 mem_loadw_be - # => [VAULT_ROOT, ASSET_KEY, ASSET, vault_root_ptr] + padw dup.12 mem_loadw_le + # => [VAULT_ROOT, ASSET_KEY, ASSET_VALUE, vault_root_ptr] # prepare insertion of an EMPTY_WORD into the vault at the asset key to remove the asset swapw padw - # => [EMPTY_WORD, ASSET_KEY, VAULT_ROOT, ASSET, vault_root_ptr] + # => [EMPTY_WORD, ASSET_KEY, VAULT_ROOT, ASSET_VALUE, vault_root_ptr] - # update asset in vault + # insert empty word into the vault to remove the asset exec.smt::set - # => [OLD_VAL, VAULT_ROOT', ASSET, vault_root_ptr] + # => [REMOVED_ASSET_VALUE, NEW_VAULT_ROOT, ASSET_VALUE, vault_root_ptr] - # assert old value was not empty (we only need to check ASSET[3] which is the faucet id) - eq.0 assertz.err=ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND drop drop drop - # => [VAULT_ROOT', ASSET, vault_root_ptr] + movupw.2 assert_eqw.err=ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND + # => [NEW_VAULT_ROOT, vault_root_ptr] # update the vault root - movup.8 mem_storew_be dropw - # => 
[ASSET] + movup.4 mem_storew_le dropw + # => [] + + # push EMPTY_WORD to represent that nothing remains after non-fungible removal + padw + # => [REMAINING_ASSET_VALUE] end -#! Remove the specified asset from the vault. +#! Remove the specified asset from the vault and return the remaining asset value. #! -#! Inputs: [ASSET, vault_root_ptr] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE, vault_root_ptr] +#! Outputs: [REMAINING_ASSET_VALUE] #! #! Where: -#! - ASSET is the asset to remove from the vault. +#! - ASSET_KEY is the asset vault key of the asset to remove from the vault. +#! - ASSET_VALUE is the value of the asset to remove from the vault. +#! - REMAINING_ASSET_VALUE is the value of the asset remaining in the vault after removal. #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. #! #! Panics if: @@ -434,67 +376,15 @@ end #! - the non-fungible asset is not found in the vault. pub proc remove_asset # check if the asset is a fungible asset - exec.asset::is_fungible_asset - # => [is_fungible_asset, ASSET, vault_root_ptr] + exec.asset::is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE, vault_root_ptr] # remove the asset from the asset vault if.true exec.remove_fungible_asset - # => [ASSET] + # => [REMAINING_ASSET_VALUE] else exec.remove_non_fungible_asset - # => [ASSET] + # => [REMAINING_ASSET_VALUE] end end - -# HELPER PROCEDURES -# ================================================================================================= - -#! Builds the vault key of a non fungible asset. The asset is NOT validated and therefore must -#! be a valid non-fungible asset. -#! -#! Inputs: [ASSET] -#! Outputs: [ASSET_KEY] -#! -#! Where: -#! - ASSET is the non-fungible asset for which the vault key is built. -#! - ASSET_KEY is the vault key of the non-fungible asset. 
-pub proc build_non_fungible_asset_vault_key - # create the asset key from the non-fungible asset by swapping hash0 with the faucet id - # => [faucet_id_prefix, hash2, hash1, hash0] - swap.3 - # => [hash0, hash2, hash1 faucet_id_prefix] - - # disassemble hash0 into u32 limbs - u32split swap - # => [hash0_lo, hash0_hi, hash2, hash1 faucet_id_prefix] - - # set the fungible bit to 0 - u32and.INVERSE_FUNGIBLE_BITMASK_U32 - # => [hash0_lo', hash0_hi, hash2, hash1 faucet_id_prefix] - - # reassemble hash0 felt by multiplying the high part with 2^32 and adding the lo part - swap mul.0x0100000000 add - # => [ASSET_KEY] -end - -#! TODO: Add Rust <-> MASM test. -#! -#! Builds the vault key of a fungible asset. The asset is NOT validated and therefore must -#! be a valid fungible asset. -#! -#! Inputs: [ASSET] -#! Outputs: [ASSET_KEY, ASSET] -#! -#! Where: -#! - ASSET is the fungible asset for which the vault key is built. -#! - ASSET_KEY is the vault key of the fungible asset. -pub proc build_fungible_asset_vault_key - # => [faucet_id_prefix, faucet_id_suffix, 0, amount] - - push.0.0 - # => [0, 0, faucet_id_prefix, faucet_id_suffix, 0, amount] - - dup.3 dup.3 - # => [faucet_id_prefix, faucet_id_suffix, 0, 0, faucet_id_prefix, faucet_id_suffix, 0, amount] -end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/callbacks.masm b/crates/miden-protocol/asm/kernels/transaction/lib/callbacks.masm new file mode 100644 index 0000000000..c54522f0d5 --- /dev/null +++ b/crates/miden-protocol/asm/kernels/transaction/lib/callbacks.masm @@ -0,0 +1,197 @@ +use $kernel::tx +use $kernel::asset +use $kernel::account +use miden::core::word + +# CONSTANTS +# ================================================================================================== + +# The index of the local memory slot that contains the procedure root of the callback. 
+const CALLBACK_PROC_ROOT_LOC = 0 + +# The name of the storage slot where the procedure root for the on_before_asset_added_to_account callback +# is stored. +pub const ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_PROC_ROOT_SLOT = word("miden::protocol::faucet::callback::on_before_asset_added_to_account") + +# The name of the storage slot where the procedure root for the on_before_asset_added_to_note callback +# is stored. +pub const ON_BEFORE_ASSET_ADDED_TO_NOTE_PROC_ROOT_SLOT = word("miden::protocol::faucet::callback::on_before_asset_added_to_note") + +# PROCEDURES +# ================================================================================================== + +#! Invokes the `on_before_asset_added_to_account` callback on the faucet that issued the asset, +#! if the asset has callbacks enabled. +#! +#! The callback invocation is skipped in these cases: +#! - If the global callback flag in the asset key is `Disabled`. +#! - If the faucet does not have the callback storage slot. +#! - If the callback storage slot contains the empty word. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [PROCESSED_ASSET_VALUE] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset being added. +#! - ASSET_VALUE is the value of the asset being added. +#! - PROCESSED_ASSET_VALUE is the asset value returned by the callback, or the original +#! ASSET_VALUE if callbacks are disabled. +pub proc on_before_asset_added_to_account + exec.asset::key_to_callbacks_enabled + # => [callbacks_enabled, ASSET_KEY, ASSET_VALUE] + + if.true + # set custom_data = 0 + push.0 movdn.8 + # => [ASSET_KEY, ASSET_VALUE, custom_data = 0] + + push.ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_PROC_ROOT_SLOT[0..2] + exec.invoke_callback + # => [PROCESSED_ASSET_VALUE] + else + # drop asset key + dropw + # => [ASSET_VALUE] + end + # => [PROCESSED_ASSET_VALUE] +end + +#! Invokes the `on_before_asset_added_to_note` callback on the faucet that issued the asset, +#! if the asset has callbacks enabled. +#! +#! 
The callback invocation is skipped in these cases: +#! - If the global callback flag in the asset key is `Disabled`. +#! - If the faucet does not have the callback storage slot. +#! - If the callback storage slot contains the empty word. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx] +#! Outputs: [PROCESSED_ASSET_VALUE] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset being added. +#! - ASSET_VALUE is the value of the asset being added. +#! - note_idx is the index of the output note the asset is being added to. +#! - PROCESSED_ASSET_VALUE is the asset value returned by the callback, or the original +#! ASSET_VALUE if callbacks are disabled. +pub proc on_before_asset_added_to_note + exec.asset::key_to_callbacks_enabled + # => [callbacks_enabled, ASSET_KEY, ASSET_VALUE, note_idx] + + if.true + push.ON_BEFORE_ASSET_ADDED_TO_NOTE_PROC_ROOT_SLOT[0..2] + exec.invoke_callback + # => [PROCESSED_ASSET_VALUE] + else + # drop asset key and note index + dropw movup.4 drop + # => [ASSET_VALUE] + end + # => [PROCESSED_ASSET_VALUE] +end + +#! Invokes a callback by starting a foreign context against the faucet, reading the callback +#! procedure root from the provided slot ID in the faucet's storage, and invoking it via `dyncall`. +#! +#! If the faucet does not have the callback storage slot, or if the slot contains the empty word, +#! the callback is skipped and the original ASSET_VALUE is returned. +#! +#! custom_data should be set to 0 for the account callback and to note_idx for the note callback. +#! +#! Inputs: [slot_id_suffix, slot_id_prefix, ASSET_KEY, ASSET_VALUE, custom_data] +#! Outputs: [PROCESSED_ASSET_VALUE] +#! +#! Where: +#! - slot_id* is the ID of the slot that contains the callback procedure root. +#! - ASSET_KEY is the vault key of the asset being added. +#! - ASSET_VALUE is the value of the asset being added. +#! - PROCESSED_ASSET_VALUE is the asset value returned by the callback, or the original +#! ASSET_VALUE if no callback is configured. 
+@locals(4) +proc invoke_callback + exec.start_foreign_callback_context + # => [should_invoke, PROC_ROOT, ASSET_KEY, ASSET_VALUE, custom_data] + + # only invoke the callback if the procedure root is not the empty word + if.true + # prepare for dyncall by storing procedure root in local memory + loc_storew_le.CALLBACK_PROC_ROOT_LOC dropw + # => [ASSET_KEY, ASSET_VALUE, custom_data] + + # pad the stack to 16 for the call + repeat.7 push.0 movdn.9 end + # => [ASSET_KEY, ASSET_VALUE, custom_data, pad(7)] + + # invoke the callback + locaddr.CALLBACK_PROC_ROOT_LOC + dyncall + # => [PROCESSED_ASSET_VALUE, pad(12)] + + # truncate the stack after the call + swapdw dropw dropw swapw dropw + # => [PROCESSED_ASSET_VALUE] + else + # drop proc root, asset key and custom_data + dropw dropw movup.4 drop + # => [ASSET_VALUE] + end + # => [PROCESSED_ASSET_VALUE] + + exec.end_foreign_callback_context + # => [PROCESSED_ASSET_VALUE] +end + +#! Prepares the invocation of a faucet callback by starting a foreign context against the faucet +#! identified by the asset key's faucet ID, looking up the callback procedure root from the +#! faucet's storage, and computing whether the callback should be invoked. +#! +#! The callback should be invoked if the storage slot exists and contains a non-empty procedure +#! root. +#! +#! Inputs: [slot_id_suffix, slot_id_prefix, ASSET_KEY, ASSET_VALUE] +#! Outputs: [should_invoke, PROC_ROOT, ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - slot_id_suffix and slot_id_prefix identify the storage slot containing the callback procedure root. +#! - ASSET_KEY is the vault key of the asset being added. +#! - ASSET_VALUE is the value of the asset being added. +#! - should_invoke is 1 if the callback should be invoked, 0 otherwise. +#! - PROC_ROOT is the procedure root of the callback, or the empty word if not found. 
+proc start_foreign_callback_context + # move slot IDs past ASSET_KEY and ASSET_VALUE + movdn.9 movdn.9 + # => [ASSET_KEY, ASSET_VALUE, slot_id_suffix, slot_id_prefix] + + exec.asset::key_to_faucet_id + # => [faucet_id_suffix, faucet_id_prefix, ASSET_KEY, ASSET_VALUE, slot_id_suffix, slot_id_prefix] + + # start a foreign context against the faucet + exec.tx::start_foreign_context + # => [ASSET_KEY, ASSET_VALUE, slot_id_suffix, slot_id_prefix] + + # bring slot IDs back to top + movup.9 movup.9 + # => [slot_id_suffix, slot_id_prefix, ASSET_KEY, ASSET_VALUE] + + # try to find the callback procedure root in the faucet's storage + exec.account::find_item + # => [is_found, PROC_ROOT, ASSET_KEY, ASSET_VALUE] + + movdn.4 exec.word::testz not + # => [is_non_empty_word, PROC_ROOT, is_found, ASSET_KEY, ASSET_VALUE] + + # should_invoke = is_found && is_non_empty_word + movup.5 and + # => [should_invoke, PROC_ROOT, ASSET_KEY, ASSET_VALUE] +end + +#! Ends a foreign callback context. +#! +#! This pops the top of the account stack, making the previous account the active account. +#! +#! This wrapper exists only for uniformity with start_foreign_callback_context. +#! +#! Inputs: [] +#! Outputs: [] +proc end_foreign_callback_context + exec.tx::end_foreign_context +end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/constants.masm b/crates/miden-protocol/asm/kernels/transaction/lib/constants.masm index 869a7a818a..2d64bb61ad 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/constants.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/constants.masm @@ -14,7 +14,7 @@ pub const MAX_ASSETS_PER_NOTE = 256 pub const MAX_INPUT_NOTES_PER_TX = 1024 # The size of the memory segment allocated to each note. -pub const NOTE_MEM_SIZE = 2048 +pub const NOTE_MEM_SIZE = 3072 # The depth of the Merkle tree used to commit to notes produced in a block. 
pub const NOTE_TREE_DEPTH = 16 @@ -29,7 +29,7 @@ pub const ACCOUNT_PROCEDURE_DATA_LENGTH = 4 # ================================================================================================= # Root of an empty Sparse Merkle Tree -pub const EMPTY_SMT_ROOT = [15321474589252129342, 17373224439259377994, 15071539326562317628, 3312677166725950353] +pub const EMPTY_SMT_ROOT = [11569107685829756166, 7187477731240244145, 8326334713638926095, 2239973196746300865] # Type of storage slot item in the account storage pub const STORAGE_SLOT_TYPE_VALUE = 0 diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/epilogue.masm b/crates/miden-protocol/asm/kernels/transaction/lib/epilogue.masm index 63a8f92a61..4ac2b7ceea 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/epilogue.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/epilogue.masm @@ -1,11 +1,14 @@ use $kernel::account use $kernel::account_delta +use $kernel::asset +use $kernel::asset::ASSET_SIZE use $kernel::asset_vault use $kernel::constants::NOTE_MEM_SIZE +use $kernel::fungible_asset use $kernel::memory use $kernel::note -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 use miden::core::word # ERRORS @@ -46,7 +49,7 @@ const SMT_SET_ADDITIONAL_CYCLES=250 # that this includes at least smt::set's best case number of cycles. # This can be _estimated_ using the transaction measurements on ExecutedTransaction and can be set # to the lowest observed value. -const NUM_POST_COMPUTE_FEE_CYCLES=500 +const NUM_POST_COMPUTE_FEE_CYCLES=608 # The number of cycles the epilogue is estimated to take after compute_fee has been executed. 
const ESTIMATED_AFTER_COMPUTE_FEE_CYCLES=NUM_POST_COMPUTE_FEE_CYCLES+SMT_SET_ADDITIONAL_CYCLES @@ -151,7 +154,7 @@ proc build_output_vault # output_notes_end_ptr] # compute the end pointer for output note asset looping - dup.3 mul.4 add swap + dup.3 mul.ASSET_SIZE add swap # => [assets_start_ptr, assets_end_ptr, output_vault_root_ptr, num_assets, note_data_ptr, # output_notes_end_ptr] @@ -168,8 +171,8 @@ proc build_output_vault # num_assets, note_data_ptr, output_notes_end_ptr] # read the output note asset from memory - padw dup.5 mem_loadw_be - # => [ASSET, output_vault_root_ptr, assets_start_ptr, assets_end_ptr, + dup.1 exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, output_vault_root_ptr, assets_start_ptr, assets_end_ptr, # output_vault_root_ptr, num_assets, note_data_ptr, output_notes_end_ptr] # insert output note asset into output vault @@ -178,7 +181,7 @@ proc build_output_vault # note_data_ptr, output_notes_end_ptr] # increment assets_start_ptr and asses if we should loop again - add.4 dup.1 dup.1 neq + add.ASSET_SIZE dup.1 dup.1 neq # => [should_loop, assets_start_ptr, assets_end_ptr, output_vault_root_ptr, num_assets, # note_data_ptr, output_notes_end_ptr] end @@ -216,7 +219,7 @@ proc execute_auth_procedure push.0 exec.memory::get_account_procedure_ptr # => [auth_procedure_ptr, AUTH_ARGS, pad(12)] - padw dup.4 mem_loadw_be + padw dup.4 mem_loadw_le # => [AUTH_PROC_ROOT, auth_procedure_ptr, AUTH_ARGS, pad(12)] # if auth procedure was called already, it must have been called by a user, which is disallowed @@ -273,23 +276,29 @@ proc compute_fee # => [verification_cost] end -#! Builds the fee asset with the provided fee amount and the native asset ID of the transaction's +#! Creates the fee asset with the provided fee amount and the native asset ID of the transaction's #! reference block as the faucet ID. #! #! Inputs: [fee_amount] -#! Outputs: [FEE_ASSET] +#! Outputs: [FEE_ASSET_KEY, FEE_ASSET_VALUE] #! #! Where: #! 
- fee_amount is the computed fee amount of the transaction in the native asset. -#! - FEE_ASSET is the fungible asset with amount set to fee_amount and the faucet ID set to the -#! native asset. -proc build_native_fee_asset +#! - FEE_ASSET_KEY is the asset vault key of the fee asset. +#! - FEE_ASSET_VALUE is the fungible asset with amount set to fee_amount and the faucet ID set to +#! the native asset. +proc create_native_fee_asset exec.memory::get_native_asset_id - # => [native_asset_id_prefix, native_asset_id_suffix, fee_amount] + # => [native_asset_id_suffix, native_asset_id_prefix, fee_amount] + + # assume the fee asset does not have callbacks + # this should be addressed more holistically with a fee construction refactor + push.0 + # => [enable_callbacks, native_asset_id_suffix, native_asset_id_prefix, fee_amount] - push.0 movdn.2 - # => [native_asset_id_prefix, native_asset_id_suffix, 0, fee_amount] - # => [FEE_ASSET] + # SAFETY: native asset ID should be fungible and amount should not be exceeded + exec.fungible_asset::create_unchecked + # => [FEE_ASSET_KEY, FEE_ASSET_VALUE] end #! Computes the fee of this transaction and removes the asset from the native account's vault. @@ -300,26 +309,33 @@ end #! check. That's okay, because the logic is entirely determined by the transaction kernel. #! #! Inputs: [] -#! Outputs: [FEE_ASSET] +#! Outputs: [native_asset_id_suffix, native_asset_id_prefix, fee_amount] #! #! Where: #! - fee_amount is the computed fee amount of the transaction in the native asset. -#! - FEE_ASSET is the fungible asset with amount set to fee_amount and the faucet ID set to the +#! - native_asset_id_{prefix,suffix} are the prefix and suffix felts of the faucet that issues the #! native asset. #! #! Panics if: -#! - the account vault does not contain the computed fee. +#! - the account vault contains less than the computed fee. 
proc compute_and_remove_fee # compute the fee the tx needs to pay - exec.compute_fee - # => [fee_amount] + exec.compute_fee dup + # => [fee_amount, fee_amount] # build the native asset from the fee amount - exec.build_native_fee_asset - # => [FEE_ASSET] + exec.create_native_fee_asset + # => [FEE_ASSET_KEY, FEE_ASSET_VALUE, fee_amount] emit.EPILOGUE_BEFORE_TX_FEE_REMOVED_FROM_ACCOUNT_EVENT - # => [FEE_ASSET] + # => [FEE_ASSET_KEY, FEE_ASSET_VALUE, fee_amount] + + # prepare the return value + exec.asset::key_to_faucet_id + # => [native_asset_id_suffix, native_asset_id_prefix, FEE_ASSET_KEY, FEE_ASSET_VALUE, fee_amount] + + movdn.9 movdn.9 + # => [FEE_ASSET_KEY, FEE_ASSET_VALUE, native_asset_id_suffix, native_asset_id_prefix, fee_amount] # remove the fee from the native account's vault # note that this deliberately does not use account::remove_asset_from_vault, because that @@ -329,13 +345,13 @@ proc compute_and_remove_fee # commitment has already been computed and so any modifications done to the delta at this point # are essentially ignored. - # fetch the vault root - exec.memory::get_account_vault_root_ptr movdn.4 - # => [FEE_ASSET, acct_vault_root_ptr] + # fetch the vault root ptr + exec.memory::get_account_vault_root_ptr movdn.8 + # => [FEE_ASSET_KEY, FEE_ASSET_VALUE, account_vault_root_ptr, native_asset_id_suffix, native_asset_id_prefix, fee_amount] # remove the asset from the account vault - exec.asset_vault::remove_fungible_asset - # => [FEE_ASSET] + exec.asset_vault::remove_fungible_asset dropw + # => [native_asset_id_suffix, native_asset_id_prefix, fee_amount] end # TRANSACTION EPILOGUE PROCEDURE @@ -355,18 +371,23 @@ end #! them in the fee and calculating is easiest when the operations are simple. #! #! Inputs: [] -#! Outputs: [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, FEE_ASSET, tx_expiration_block_num] +#! Outputs: [ +#! OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, +#! 
native_asset_id_suffix, native_asset_id_prefix, fee_amount, tx_expiration_block_num +#! ] #! #! Where: #! - OUTPUT_NOTES_COMMITMENT is the commitment of the output notes. #! - ACCOUNT_UPDATE_COMMITMENT is the hash of the the final account commitment and account #! delta commitment. -#! - FEE_ASSET is the fungible asset used as the transaction fee. +#! - fee_amount is the computed fee amount of the transaction denominated in the native asset. +#! - native_asset_id_{prefix,suffix} are the prefix and suffix felts of the faucet that issues the +#! native asset. #! - tx_expiration_block_num is the transaction expiration block number. #! #! Locals: #! - 0..4: OUTPUT_NOTES_COMMITMENT -#! - 4..8: FEE_ASSET +#! - 4..8: FEE_ASSET_INFO #! - 8..12: ACCOUNT_DELTA_COMMITMENT #! #! Panics if: @@ -415,7 +436,7 @@ pub proc finalize_transaction # => [OUTPUT_NOTES_COMMITMENT] # store commitment in local - loc_storew_be.0 dropw + loc_storew_le.0 dropw # => [] # ------ Compute account delta commitment ------ @@ -424,7 +445,7 @@ pub proc finalize_transaction # => [ACCOUNT_DELTA_COMMITMENT] # store commitment in local - loc_storew_be.8 + loc_storew_le.8 # => [ACCOUNT_DELTA_COMMITMENT] # ------ Assert that account was changed or notes were consumed ------ @@ -450,10 +471,14 @@ pub proc finalize_transaction # ------ Compute fees ------ exec.compute_and_remove_fee - # => [FEE_ASSET] + # => [native_asset_id_suffix, native_asset_id_prefix, fee_amount] + + # pad to word size so we can store the info as a word + push.0 movdn.3 + # => [native_asset_id_suffix, native_asset_id_prefix, fee_amount, 0] - # store fee asset in local - loc_storew_be.4 dropw + # store fee info in local memory + loc_storew_le.4 dropw # => [] # ------ Insert final account data into advice provider ------ @@ -481,27 +506,29 @@ pub proc finalize_transaction # ------ Compute and insert account update commitment ------ # load account delta commitment from local - padw loc_loadw_be.8 - # => [ACCOUNT_DELTA_COMMITMENT, 
FINAL_ACCOUNT_COMMITMENT] + padw loc_loadw_le.8 swapw + # => [FINAL_ACCOUNT_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] # insert into advice map ACCOUNT_UPDATE_COMMITMENT: (FINAL_ACCOUNT_COMMITMENT, ACCOUNT_DELTA_COMMITMENT), # where ACCOUNT_UPDATE_COMMITMENT = hash(FINAL_ACCOUNT_COMMITMENT || ACCOUNT_DELTA_COMMITMENT) adv.insert_hdword - # => [ACCOUNT_DELTA_COMMITMENT, FINAL_ACCOUNT_COMMITMENT] + # => [FINAL_ACCOUNT_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] - exec.rpo256::merge + exec.poseidon2::merge # => [ACCOUNT_UPDATE_COMMITMENT] # ------ Build output stack ------ - exec.memory::get_expiration_block_num movdn.4 - # => [ACCOUNT_UPDATE_COMMITMENT, tx_expiration_block_num] - # load fee asset from local - padw loc_loadw_be.4 swapw - # => [ACCOUNT_UPDATE_COMMITMENT, FEE_ASSET, tx_expiration_block_num] + padw loc_loadw_le.4 swapw + # => [ACCOUNT_UPDATE_COMMITMENT, [native_asset_id_suffix, native_asset_id_prefix, fee_amount, 0]] + + # replace 0 with expiration block num + exec.memory::get_expiration_block_num swap.8 drop + # => [ACCOUNT_UPDATE_COMMITMENT, [native_asset_id_suffix, native_asset_id_prefix, fee_amount, tx_expiration_block_num]] # load output notes commitment from local - padw loc_loadw_be.0 - # => [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, FEE_ASSET, tx_expiration_block_num] + padw loc_loadw_le.0 + # => [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, + # native_asset_id_suffix, native_asset_id_prefix, fee_amount, tx_expiration_block_num] end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/faucet.masm b/crates/miden-protocol/asm/kernels/transaction/lib/faucet.masm index b2aad39f10..c3d4223641 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/faucet.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/faucet.masm @@ -1,14 +1,9 @@ use $kernel::account -use $kernel::account_id use $kernel::asset use $kernel::asset_vault +use $kernel::fungible_asset +use $kernel::non_fungible_asset use $kernel::memory -use 
$kernel::util::asset::FUNGIBLE_ASSET_MAX_AMOUNT - -# ERRORS -# ================================================================================================= - -const ERR_FAUCET_BURN_NON_FUNGIBLE_ASSET_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET="the burn_non_fungible_asset procedure can only be called on a non-fungible faucet" # FUNGIBLE ASSETS # ================================================================================================== @@ -16,12 +11,13 @@ const ERR_FAUCET_BURN_NON_FUNGIBLE_ASSET_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUC #! Mints a fungible asset associated with the fungible faucet the transaction is being executed #! against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [NEW_ASSET_VALUE] #! #! Where: -#! - amount is the amount of the fungible asset to mint. -#! - ASSET is the asset that was minted. +#! - ASSET_KEY is the vault key of the asset to mint. +#! - ASSET_VALUE is the value of the asset value to mint. +#! - NEW_ASSET_VALUE is ASSET_VALUE merged with the existing vault asset value, if any. #! #! Panics if: #! - the transaction is not being executed against a fungible faucet. @@ -31,26 +27,27 @@ pub proc mint_fungible_asset # assert that the asset was issued by the faucet the transaction is being executed against and # that the asset is valid exec.account::get_id - exec.asset::validate_fungible_asset_origin - # => [ASSET] + exec.fungible_asset::validate_origin + # => [ASSET_KEY, ASSET_VALUE] exec.memory::get_input_vault_root_ptr - movdn.4 - # => [ASSET, input_vault_root_ptr] + movdn.8 + # => [ASSET_KEY, ASSET_VALUE, input_vault_root_ptr] # add the asset to the input vault for asset preservation exec.asset_vault::add_fungible_asset - # => [ASSET] + # => [NEW_ASSET_VALUE] end #! Burns a fungible asset associated with the fungible faucet the transaction is being executed #! against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [] #! #! 
Where: -#! - ASSET is the asset that was burned. +#! - ASSET_KEY is the vault key of the asset to burn. +#! - ASSET_VALUE is the value of the asset to burn. #! #! Panics if: #! - the transaction is not being executed against a fungible faucet. @@ -61,16 +58,20 @@ proc burn_fungible_asset # assert that the asset is associated with the faucet the transaction is being executed against # and that the asset is valid exec.account::get_id - exec.asset::validate_fungible_asset_origin - # => [ASSET] + exec.fungible_asset::validate_origin + # => [ASSET_KEY, ASSET_VALUE] exec.memory::get_input_vault_root_ptr - movdn.4 - # => [ASSET, input_vault_root_ptr] + movdn.8 + # => [ASSET_KEY, ASSET_VALUE, input_vault_root_ptr] # remove the asset from the input vault for asset preservation exec.asset_vault::remove_fungible_asset - # => [ASSET] + # => [REMAINING_ASSET_VALUE] + + # drop the remaining value (not meaningful to the caller) + dropw + # => [] end # NON-FUNGIBLE ASSETS @@ -79,11 +80,14 @@ end #! Mints a non-fungible asset associated with the non-fungible faucet the transaction is being #! executed against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [NEW_ASSET_VALUE] #! #! Where: -#! - ASSET is the asset that was minted. +#! - ASSET_KEY is the vault key of the asset to mint. +#! - ASSET_VALUE is the value of the asset value to mint. +#! - NEW_ASSET_VALUE is identical to ASSET_VALUE. This is to maintain API uniformity with +#! mint_fungible_asset. #! #! Panics if: #! - the transaction is not being executed against a non-fungible faucet. 
@@ -93,29 +97,27 @@ proc mint_non_fungible_asset # assert that the asset is associated with the faucet the transaction is being executed against # and that the asset is valid exec.account::get_id - swap drop - # => [faucet_id_prefix, ASSET] - - exec.asset::validate_non_fungible_asset_origin - # => [ASSET] + exec.non_fungible_asset::validate_origin + # => [ASSET_KEY, ASSET_VALUE] exec.memory::get_input_vault_root_ptr - movdn.4 - # => [ASSET, input_vault_root_ptr] + movdn.8 + # => [ASSET_KEY, ASSET_VALUE, input_vault_root_ptr] # add the non-fungible asset to the input vault for asset preservation exec.asset_vault::add_non_fungible_asset - # => [ASSET] + # => [NEW_ASSET_VALUE] end #! Burns a non-fungible asset associated with the non-fungible faucet the transaction is being #! executed against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [] #! #! Where: -#! - ASSET is the asset that was burned. +#! - ASSET_KEY is the vault key of the asset to burn. +#! - ASSET_VALUE is the value of the asset to burn. #! #! Panics if: #! - the transaction is not being executed against a non-fungible faucet. @@ -125,19 +127,20 @@ proc burn_non_fungible_asset # assert that the asset was issued by the faucet the transaction is being executed against and # that the asset is valid exec.account::get_id - swap drop - # => [faucet_id_prefix, ASSET] - - exec.asset::validate_non_fungible_asset_origin - # => [ASSET] + exec.non_fungible_asset::validate_origin + # => [ASSET_KEY, ASSET_VALUE] # remove the non-fungible asset from the input vault for asset preservation exec.memory::get_input_vault_root_ptr - movdn.4 - # => [ASSET, input_vault_root_ptr] + movdn.8 + # => [ASSET_KEY, ASSET_VALUE, input_vault_root_ptr] exec.asset_vault::remove_non_fungible_asset - # => [ASSET] + # => [REMAINING_ASSET_VALUE] + + # drop the remaining value (not meaningful to the caller) + dropw + # => [] end # PUBLIC INTERFACE @@ -145,11 +148,15 @@ end #! 
Mint an asset from the faucet the transaction is being executed against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [NEW_ASSET_VALUE] #! #! Where: -#! - ASSET is the asset that was minted. +#! - ASSET_KEY is the vault key of the asset to mint. +#! - ASSET_VALUE is the value of the asset value to mint. +#! - NEW_ASSET_VALUE is: +#! - For fungible assets: the ASSET_VALUE merged with the existing vault asset value, if any. +#! - For non-fungible assets: identical to ASSET_VALUE. #! #! Panics if: #! - the transaction is not being executed against a faucet. @@ -161,27 +168,28 @@ end #! - For non-fungible faucets if the non-fungible asset being minted already exists. pub proc mint # check if the asset is a fungible asset - exec.asset::is_fungible_asset - # => [is_fungible_asset, ASSET] + exec.asset::is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE] if.true # mint the fungible asset exec.mint_fungible_asset - # => [ASSET] + # => [NEW_ASSET_VALUE] else # mint the non-fungible asset exec.mint_non_fungible_asset - # => [ASSET] + # => [NEW_ASSET_VALUE] end end #! Burn an asset from the faucet the transaction is being executed against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [] #! #! Where: -#! - ASSET is the asset that was burned. +#! - ASSET_KEY is the vault key of the asset to burn. +#! - ASSET_VALUE is the value of the asset to burn. #! #! Panics if: #! - the transaction is not being executed against a faucet. @@ -194,16 +202,16 @@ end #! provided as input to the transaction via a note or the accounts vault. 
pub proc burn # check if the asset is a fungible asset - exec.asset::is_fungible_asset - # => [is_fungible_asset, ASSET] + exec.asset::is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE] if.true # burn the fungible asset exec.burn_fungible_asset - # => [ASSET] + # => [] else # burn the non-fungible asset exec.burn_non_fungible_asset - # => [ASSET] + # => [] end end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/fungible_asset.masm b/crates/miden-protocol/asm/kernels/transaction/lib/fungible_asset.masm new file mode 100644 index 0000000000..a5e5f8d202 --- /dev/null +++ b/crates/miden-protocol/asm/kernels/transaction/lib/fungible_asset.masm @@ -0,0 +1,195 @@ +# Contains procedures for the built-in fungible asset. + +use $kernel::account_id +use $kernel::util::asset::FUNGIBLE_ASSET_MAX_AMOUNT +use $kernel::asset + +# RE-EXPORTS +# ================================================================================================= + +pub use $kernel::util::asset::create_fungible_key->create_key +pub use $kernel::util::asset::create_fungible_asset_unchecked->create_unchecked +pub use $kernel::util::asset::fungible_to_amount->to_amount +pub use $kernel::util::asset::fungible_value_into_amount->value_into_amount + +# ERRORS +# ================================================================================================= + +const ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED="adding the fungible asset to the vault would exceed the max amount" + +const ERR_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_FUNGIBLE = "fungible asset vault key's account ID must be of type fungible faucet" + +const ERR_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN="the origin of the fungible asset is not this faucet" + +const ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_AMOUNT="fungible asset amount exceeds the maximum allowed amount" + +const ERR_FUNGIBLE_ASSET_VALUE_MOST_SIGNIFICANT_ELEMENTS_MUST_BE_ZERO="fungible asset value elements 1, 2 and 3 must be zeros" + +const 
ERR_FUNGIBLE_ASSET_KEY_ASSET_ID_MUST_BE_ZERO="fungible asset key asset ID prefix and suffix must be zero" + +const ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW="failed to remove the fungible asset from the vault since the amount of the asset in the vault is less than the amount to remove" + +# PROCEDURES +# ================================================================================================= + +#! Merges two fungible assets. +#! +#! WARNING: This procedure assumes the assets have been validated. +#! +#! Inputs: [ASSET_VALUE_0, ASSET_VALUE_1] +#! Outputs: [MERGED_ASSET_VALUE] +#! +#! Where: +#! - ASSET_VALUE_{0, 1} are the assets to merge. +#! - MERGED_ASSET_VALUE is the merged asset. +#! +#! Panics if: +#! - adding the two asset values would exceed FUNGIBLE_ASSET_MAX_AMOUNT. +pub proc merge + # extract amounts from assets + exec.value_into_amount movdn.4 exec.value_into_amount + # => [amount_1, amount_0] + + # compute max_add_amount = FUNGIBLE_ASSET_MAX_AMOUNT - amount_0 + # this is the amount that can at most be added to amount_0 still have a valid asset + dup push.FUNGIBLE_ASSET_MAX_AMOUNT dup.3 sub + # => [max_add_amount, amount_1, amount_1, amount_0] + + # assert it is safe to add the amounts together, i.e. amount_1 <= max_add_amount + lte assert.err=ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED + # => [amount_1, amount_0] + + # add the amounts + add + # => [merged_amount] + + # reconstruct the asset value + push.0.0.0 movup.3 + # => [MERGED_ASSET_VALUE] +end + +#! Computes ASSET_VALUE_0 - ASSET_VALUE_1 and returns the result. +#! +#! For instance, split(40, 100) returns 60. The operand order matches the `sub` instruction. +#! +#! WARNING: This procedure assumes the assets have been validated. +#! +#! Inputs: [ASSET_VALUE_1, ASSET_VALUE_0] +#! Outputs: [NEW_ASSET_VALUE_0] +#! +#! Where: +#! - ASSET_VALUE_{0, 1} are the assets to split. +#! - NEW_ASSET_VALUE_0 is the result of the split computation. +#! +#! Panics if: +#! 
- ASSET_VALUE_0 does not contain at least the amount of ASSET_VALUE_1. +pub proc split + # extract amounts from assets + exec.value_into_amount movdn.4 exec.value_into_amount swap + # => [amount_1, amount_0] + + # assert amount_1 <= amount_0 so we can safely subtract + dup dup.2 + # => [amount_0, amount_1, amount_1, amount_0] + + lte assert.err=ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW + # => [amount_1, amount_0] + + sub + # => [new_amount] + + # reconstruct the asset value + push.0.0.0 movup.3 + # => [NEW_ASSET_VALUE] +end + +#! Validates that a fungible asset is well formed. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset to validate. +#! - ASSET_VALUE is the value of the asset to validate. +#! +#! Panics if: +#! - the asset key is invalid (see validate_key). +#! - the three most significant elements in the value are not 0. +#! - the amount exceeds FUNGIBLE_ASSET_MAX_AMOUNT. +pub proc validate + exec.validate_key + # => [ASSET_KEY, ASSET_VALUE] + + dupw.1 + # => [ASSET_VALUE, ASSET_KEY, ASSET_VALUE] + + # assuming the asset is valid, its layout is: + # => [amount, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + + # assert amount <= FUNGIBLE_ASSET_MAX_AMOUNT + lte.FUNGIBLE_ASSET_MAX_AMOUNT + assert.err=ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_AMOUNT + # => [0, 0, 0, ASSET_KEY, ASSET_VALUE] + + # assert the last three elements are zeros + eq.0 assert.err=ERR_FUNGIBLE_ASSET_VALUE_MOST_SIGNIFICANT_ELEMENTS_MUST_BE_ZERO + eq.0 assert.err=ERR_FUNGIBLE_ASSET_VALUE_MOST_SIGNIFICANT_ELEMENTS_MUST_BE_ZERO + eq.0 assert.err=ERR_FUNGIBLE_ASSET_VALUE_MOST_SIGNIFICANT_ELEMENTS_MUST_BE_ZERO + # => [ASSET_KEY, ASSET_VALUE] +end + +#! Validates that a fungible asset's vault key is well formed. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [ASSET_KEY] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset to validate. +#! +#! Panics if: +#! - the asset key's account ID is not valid. +#! 
- the asset key's metadata is not valid. +#! - the asset key's faucet ID is not a fungible one. +pub proc validate_key + exec.asset::validate_issuer + # => [ASSET_KEY] + + exec.asset::is_fungible_asset_key + assert.err=ERR_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_FUNGIBLE + # => [ASSET_KEY] + + exec.asset::key_to_asset_id + # => [asset_id_suffix, asset_id_prefix, ASSET_KEY] + + eq.0 assert.err=ERR_FUNGIBLE_ASSET_KEY_ASSET_ID_MUST_BE_ZERO + eq.0 assert.err=ERR_FUNGIBLE_ASSET_KEY_ASSET_ID_MUST_BE_ZERO + # => [ASSET_KEY] +end + +#! Validates that a fungible asset is associated with the provided faucet_id. +#! +#! Inputs: [faucet_id_suffix, faucet_id_prefix, ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - faucet_id_{suffix,prefix} are the suffix and prefix of the faucet's account ID. +#! - ASSET_KEY is the vault key of the asset to validate. +#! - ASSET_VALUE is the value of the asset to validate. +pub proc validate_origin + movdn.9 movdn.9 + # => [ASSET_KEY, ASSET_VALUE, faucet_id_suffix, faucet_id_prefix] + + # assert the fungible asset key and value are valid + exec.validate + # => [ASSET_KEY, ASSET_VALUE, faucet_id_suffix, faucet_id_prefix] + + # assert the origin of the asset is the faucet_id provided via the stack + exec.asset::key_to_faucet_id + # => [key_faucet_id_suffix, key_faucet_id_prefix, ASSET_KEY, ASSET_VALUE, faucet_id_suffix, faucet_id_prefix] + + movup.11 movup.11 + # => [faucet_id_suffix, faucet_id_prefix, key_faucet_id_suffix, key_faucet_id_prefix, ASSET_KEY, ASSET_VALUE] + + exec.account_id::is_equal assert.err=ERR_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN + # => [ASSET_KEY, ASSET_VALUE] +end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/link_map.masm b/crates/miden-protocol/asm/kernels/transaction/lib/link_map.masm index 3ed748cb27..edc3d78314 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/link_map.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/link_map.masm @@ -1,4 +1,3 @@ 
-use miden::core::collections::smt use miden::core::word use $kernel::memory @@ -119,7 +118,7 @@ use $kernel::memory # ERRORS # ================================================================================================= -const ERR_LINK_MAP_CANNOT_BE_EMPTY_ON_ABSENCE_AFTER_ENTRY="map cannot be empty when proving absence after an entry" +const ERR_LINK_MAP_EMPTY_MAP_REQUIRES_AT_HEAD_OPERATION="empty map requires operation InsertAtHead for set or AbsentAtHead for get" const ERR_LINK_MAP_PROVIDED_KEY_NOT_EQUAL_TO_ENTRY_KEY="provided key does not match key in map entry" @@ -197,47 +196,55 @@ const LINK_MAP_GET_EVENT=event("miden::protocol::link_map::get") #! #! Panics if: #! - the host provides faulty advice. See panic sections of assert_entry_ptr_is_valid, -#! update_entry, insert_at_head, insert_after_entry. +#! update_entry, insert_at_head, insert_after_entry and assert_empty_map_op_is_at_head. pub proc set emit.LINK_MAP_SET_EVENT adv_push.2 # => [operation, entry_ptr, map_ptr, KEY, VALUE0, VALUE1] - dup.2 dup.2 - # => [entry_ptr, map_ptr, operation, entry_ptr, map_ptr, KEY, VALUE0, VALUE1] + dup eq.INSERT_OPERATION_UPDATE swap eq.INSERT_OPERATION_AT_HEAD + # => [is_insert_at_head_op, is_insert_update_op, entry_ptr, map_ptr, KEY, VALUE0, VALUE1] - exec.assert_entry_ptr_is_valid - # => [operation, entry_ptr, map_ptr, KEY, VALUE0, VALUE1] + dup dup.4 + # => [map_ptr, is_insert_at_head_op, is_insert_at_head_op, is_insert_update_op, entry_ptr, map_ptr, KEY, VALUE0, VALUE1] - dup eq.INSERT_OPERATION_AT_HEAD swap eq.INSERT_OPERATION_UPDATE - # => [is_insert_update_op, is_insert_at_head_op, entry_ptr, map_ptr, KEY, VALUE0, VALUE1] + exec.assert_empty_map_op_is_at_head + # => [is_insert_at_head_op, is_insert_update_op, entry_ptr, map_ptr, KEY, VALUE0, VALUE1] if.true - # update existing entry - # drop is_insert_at_head_op and map_ptr from stack - drop swap drop - # => [entry_ptr, KEY, VALUE0, VALUE1] + # drop is_insert_update_op and the unvalidated entry ptr since we 
can load the entry ptr + # at the head of the map securely from map_ptr + drop drop + # => [map_ptr, KEY, VALUE0, VALUE1] - exec.update_entry + exec.insert_at_head # => [] - push.0 + push.1 # => [is_new_key] else - # insert new entry - # => [is_insert_at_head_op, entry_ptr, map_ptr, KEY, VALUE0, VALUE1] + # => [is_insert_update_op, entry_ptr, map_ptr, KEY, VALUE0, VALUE1] + + dup.2 dup.2 + # => [entry_ptr, map_ptr, is_insert_update_op, entry_ptr, map_ptr, KEY, VALUE0, VALUE1] + + # validate the entry pointer + # this can be skipped for insert_at_head because the entry_ptr is not used in that branch + exec.assert_entry_ptr_is_valid + # => [is_insert_update_op, entry_ptr, map_ptr, KEY, VALUE0, VALUE1] # note: the is_new_key flag logic is duplicated rather than appended after the if-else branch # to avoid introducing an extra MAST node if.true - # drop the entry ptr since we can load the head from map_ptr - drop - # => [map_ptr, KEY, VALUE0, VALUE1] + # update existing entry + # drop unused map_ptr + swap drop + # => [entry_ptr, KEY, VALUE0, VALUE1] - exec.insert_at_head + exec.update_entry # => [] - push.1 + push.0 # => [is_new_key] else # insert after existing entry @@ -264,47 +271,62 @@ end #! #! Panics if: #! - the host provides faulty advice. See panic sections of assert_entry_ptr_is_valid, -#! get_existing_value, assert_absent_at_head, assert_absent_after_entry. +#! get_existing_value, assert_absent_at_head, assert_absent_after_entry and +#! assert_empty_map_op_is_at_head. 
pub proc get emit.LINK_MAP_GET_EVENT adv_push.2 # => [get_operation, entry_ptr, map_ptr, KEY] - dup.2 dup.2 - # => [entry_ptr, map_ptr, get_operation, entry_ptr, map_ptr, KEY] + dup eq.GET_OPERATION_FOUND swap eq.GET_OPERATION_ABSENT_AT_HEAD + # => [is_absent_at_head, is_found, entry_ptr, map_ptr, KEY] - exec.assert_entry_ptr_is_valid - # => [get_operation, entry_ptr, map_ptr, KEY] + dup dup.4 + # => [map_ptr, is_absent_at_head, is_absent_at_head, is_found, entry_ptr, map_ptr, KEY] - dup eq.GET_OPERATION_ABSENT_AT_HEAD swap eq.GET_OPERATION_FOUND - # => [is_found, is_absent_at_head, entry_ptr, map_ptr, KEY] + exec.assert_empty_map_op_is_at_head + # => [is_absent_at_head, is_found, entry_ptr, map_ptr, KEY] if.true - # drop is_absent_at_head and map_ptr from stack - drop swap drop - # => [entry_ptr, KEY] + # drop is_found and the unvalidated entry ptr since we can load the entry ptr + # at the head of the map securely from map_ptr + drop drop + # => [map_ptr, KEY] - exec.get_existing_value - # => [VALUE0, VALUE1] + exec.assert_absent_at_head + # => [] - push.1 - # => [contains_key, VALUE0, VALUE1] + padw padw push.0 + # => [contains_key, EMPTY_WORD, EMPTY_WORD] else - # assert absence of the entry - # => [is_absent_at_head, entry_ptr, map_ptr, KEY] + # => [is_found, entry_ptr, map_ptr, KEY] + + dup.2 dup.2 + # => [entry_ptr, map_ptr, is_found, entry_ptr, map_ptr, KEY] + + # validate the entry pointer + # this can be skipped for assert_absent_at_head because the entry_ptr is not used in that + # branch + exec.assert_entry_ptr_is_valid + # => [is_found, entry_ptr, map_ptr, KEY] + + # drop unused map_ptr + movup.2 drop + # => [is_found, entry_ptr, KEY] # note: the flag and empty word logic is duplicated rather than appended after the if-else # branch to avoid introducing an extra MAST node if.true - drop - # => [map_ptr, KEY] + # => [entry_ptr, KEY] - exec.assert_absent_at_head - # => [] + exec.get_existing_value + # => [VALUE0, VALUE1] - padw padw push.0 - # => 
[contains_key, EMPTY_WORD, EMPTY_WORD] + push.1 + # => [contains_key, VALUE0, VALUE1] else + # => [entry_ptr, KEY] + exec.assert_absent_after_entry # => [] @@ -620,17 +642,13 @@ end #! If KEY is greater than the key in the entry and less than the key in entry's next entry, then #! that proves the absence of the key. #! -#! Inputs: [entry_ptr, map_ptr, KEY] +#! Inputs: [entry_ptr, KEY] #! Outputs: [] #! #! Panics if: -#! - the map is empty. #! - the KEY is not greater than the key in the entry. #! - the KEY is not less than the key in entry.next_entry, unless the entry is the last one in the map. proc assert_absent_after_entry - swap exec.is_empty assertz.err=ERR_LINK_MAP_CANNOT_BE_EMPTY_ON_ABSENCE_AFTER_ENTRY - # => [entry_ptr, KEY] - movdn.4 dupw # => [KEY, KEY, entry_ptr] @@ -723,10 +741,10 @@ proc set_value dup movdn.5 # => [entry_ptr, VALUE0, entry_ptr, VALUE1] - add.VALUE0_OFFSET mem_storew_be dropw + add.VALUE0_OFFSET mem_storew_le dropw # => [entry_ptr, VALUE1] - add.VALUE1_OFFSET mem_storew_be dropw + add.VALUE1_OFFSET mem_storew_le dropw # => [] end @@ -750,7 +768,7 @@ proc get_value0 padw movup.4 # => [entry_ptr, pad(4)] - add.VALUE0_OFFSET mem_loadw_be + add.VALUE0_OFFSET mem_loadw_le # => [VALUE0] end @@ -762,7 +780,7 @@ proc get_value1 padw movup.4 # => [entry_ptr, pad(4)] - add.VALUE1_OFFSET mem_loadw_be + add.VALUE1_OFFSET mem_loadw_le # => [VALUE1] end @@ -771,7 +789,7 @@ end #! Inputs: [entry_ptr, KEY] #! Outputs: [] proc set_key - add.KEY_OFFSET mem_storew_be dropw + add.KEY_OFFSET mem_storew_le dropw end #! Returns the key of the entry pointer. @@ -782,7 +800,7 @@ proc get_key padw movup.4 # => [entry_ptr, pad(4)] - add.KEY_OFFSET mem_loadw_be + add.KEY_OFFSET mem_loadw_le # => [KEY] end @@ -856,18 +874,48 @@ proc assert_key_is_less # => [] end +#! Asserts that if the map is empty, the host-provided operation is the `*AT_HEAD` variant: +#! - INSERT_OPERATION_AT_HEAD for link_map::set +#! - GET_OPERATION_ABSENT_AT_HEAD for link_map::get +#! +#! 
This ensures the other operations never have to consider the empty map case to reduce +#! complexity. +#! +#! What we want is: +#! is_empty_map | !is_at_head_op | outcome +#! true | true | panic +#! true | false | ok +#! false | true | ok +#! false | false | ok +#! +#! Inputs: [map_ptr, is_at_head_op] +#! Outputs: [] +#! +#! Panics if: +#! - the map is empty and the provided operation is not the *AT_HEAD variant. +proc assert_empty_map_op_is_at_head + exec.is_empty + # => [is_empty_map, is_at_head_op] + + swap not + # => [!is_at_head_op, is_empty_map] + + and assertz.err=ERR_LINK_MAP_EMPTY_MAP_REQUIRES_AT_HEAD_OPERATION + # => [] +end + #! Asserts that the given entry ptr is a valid entry in the map identified by map_ptr. #! #! Inputs: [entry_ptr, map_ptr] #! Outputs: [] #! #! Panics if: -#! - any of the following conditions is false, except if the map is empty: +#! - any of the following conditions is false: #! - LINK_MAP_MEMORY_START_PTR <= entry_ptr < LINK_MAP_MEMORY_END_PTR. #! - entry ptr is "link map entry"-aligned, i.e. entry_ptr % LINK_MAP_ENTRY_SIZE == 0. This #! works because every entry ptr is a multiple of LINK_MAP_ENTRY_SIZE. #! - entry's map ptr is equal to the given map_ptr. -pub proc assert_entry_ptr_is_valid +proc assert_entry_ptr_is_valid # Check entry pointer is in valid memory range. 
# ------------------------------------------------------------------------------------------------- @@ -892,16 +940,8 @@ pub proc assert_entry_ptr_is_valid u32lt and # => [is_entry_ptr_in_valid_range, entry_ptr, map_ptr] - # we have to skip the assertion if the map is empty - dup.2 exec.is_empty - # => [is_empty_map, is_entry_ptr_in_valid_range, entry_ptr, map_ptr] - - dup movdn.4 - # => [is_empty_map, is_entry_ptr_in_valid_range, entry_ptr, map_ptr, is_empty_map] - - # this assertion is always true if is_empty_map is true - or assert.err=ERR_LINK_MAP_ENTRY_PTR_IS_OUTSIDE_VALID_MEMORY_REGION - # => [entry_ptr, map_ptr, is_empty_map] + assert.err=ERR_LINK_MAP_ENTRY_PTR_IS_OUTSIDE_VALID_MEMORY_REGION + # => [entry_ptr, map_ptr] # Check that the entry pointer is aligned to link map entries. # ------------------------------------------------------------------------------------------------- @@ -911,19 +951,18 @@ pub proc assert_entry_ptr_is_valid # we assert that entry_ptr % LINK_MAP_ENTRY_SIZE == 0. # note: we previously asserted that entry_ptr fits in a u32 dup exec.memory::get_link_map_entry_size u32mod eq.0 - # => [is_entry_ptr_aligned, entry_ptr, map_ptr, is_empty_map] + # => [is_entry_ptr_aligned, entry_ptr, map_ptr] - # this assertion is always true if is_empty_map is true - dup.3 or assert.err=ERR_LINK_MAP_ENTRY_PTR_IS_NOT_ENTRY_ALIGNED - # => [entry_ptr, map_ptr, is_empty_map] + assert.err=ERR_LINK_MAP_ENTRY_PTR_IS_NOT_ENTRY_ALIGNED + # => [entry_ptr, map_ptr] # Check entry pointer's map ptr is equal to map_ptr. 
# ------------------------------------------------------------------------------------------------- # check if entry_ptr.map_ptr == map_ptr exec.get_map_ptr eq - # => [entry_contains_map_ptr, is_empty_map] + # => [entry_contains_map_ptr] - # this assertion is always true if is_empty_map is true - or assert.err=ERR_LINK_MAP_MAP_PTR_IN_ENTRY_DOES_NOT_MATCH_EXPECTED_MAP_PTR + assert.err=ERR_LINK_MAP_MAP_PTR_IN_ENTRY_DOES_NOT_MATCH_EXPECTED_MAP_PTR + # => [] end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/memory.masm b/crates/miden-protocol/asm/kernels/transaction/lib/memory.masm index 1053323ea1..4efa51d1e9 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/memory.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/memory.masm @@ -1,14 +1,10 @@ use $kernel::constants::ACCOUNT_PROCEDURE_DATA_LENGTH use $kernel::constants::MAX_ASSETS_PER_NOTE use $kernel::constants::NOTE_MEM_SIZE +# use $kernel::types::AccountId use miden::core::mem -# TYPE ALIASES -# ================================================================================================= - -type AccountID = struct { prefix: felt, suffix: felt } -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } -type MemoryAddress = u32 +pub type AccountId = struct { prefix: felt, suffix: felt } # ERRORS # ================================================================================================= @@ -55,8 +51,8 @@ const OUTPUT_VAULT_ROOT_PTR = 8 # Pointer to the prefix and suffix of the ID of the foreign account which will be loaded during the # upcoming FPI call. This ID is updated during the `prepare_fpi_call` kernel procedure. 
-const UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR = 12 -const UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR = UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR + 1 +const UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR = 12 +const UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR = UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR + 1 # Pointer to the 16th input value (with index 15) of the foreign procedure which will be loaded # during the upcoming FPI call. This "buffer" value helps to work around the 15 value limitation of @@ -217,16 +213,16 @@ const ACCT_INITIAL_STORAGE_SLOTS_SECTION_OFFSET=1320 # Offset at which the account's active storage slot section is kept. This section contains the # current values of the account storage slots. -const ACCT_ACTIVE_STORAGE_SLOTS_SECTION_OFFSET=2340 +const ACCT_ACTIVE_STORAGE_SLOTS_SECTION_OFFSET=3360 # NATIVE ACCOUNT DELTA # ------------------------------------------------------------------------------------------------- # The link map pointer at which the delta of the fungible asset vault is stored. -const ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR=532480 +pub const ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR=532480 # The link map pointer at which the delta of the non-fungible asset vault is stored. -const ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR=ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR+4 +pub const ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR=ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR+4 # The section of link map pointers where storage map deltas are stored. # This section is offset by `slot index` to get the link map ptr for the storage map @@ -372,7 +368,7 @@ end #! Where: #! - INPUT_VAULT_ROOT is the input vault root. pub proc get_input_vault_root - padw mem_loadw_be.INPUT_VAULT_ROOT_PTR + padw mem_loadw_le.INPUT_VAULT_ROOT_PTR end #! Sets the input vault root. @@ -383,7 +379,7 @@ end #! Where: #! - INPUT_VAULT_ROOT is the input vault root. pub proc set_input_vault_root - mem_storew_be.INPUT_VAULT_ROOT_PTR + mem_storew_le.INPUT_VAULT_ROOT_PTR end #! Returns the pointer to the memory address at which the output vault root is stored. 
@@ -406,7 +402,7 @@ end #! Where: #! - OUTPUT_VAULT_ROOT is the output vault root. pub proc get_output_vault_root - padw mem_loadw_be.OUTPUT_VAULT_ROOT_PTR + padw mem_loadw_le.OUTPUT_VAULT_ROOT_PTR end #! Sets the output vault root. @@ -417,22 +413,22 @@ end #! Where: #! - OUTPUT_VAULT_ROOT is the output vault root. pub proc set_output_vault_root - mem_storew_be.OUTPUT_VAULT_ROOT_PTR + mem_storew_le.OUTPUT_VAULT_ROOT_PTR end #! Sets the ID of the foreign account which is going to be loaded during the upcoming FPI call. #! -#! Inputs: [foreign_account_id_prefix, foreign_account_id_suffix] +#! Inputs: [foreign_account_id_suffix, foreign_account_id_prefix] #! Outputs: [] #! #! Where: #! - foreign_account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the foreign #! account whose procedure is going to be executed. -pub proc set_fpi_account_id(foreign_account_id: AccountID) - mem_store.UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR - # => [foreign_account_id_suffix] - +pub proc set_fpi_account_id(foreign_account_id: AccountId) mem_store.UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR + # => [foreign_account_id_prefix] + + mem_store.UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR # => [] end @@ -441,17 +437,17 @@ end #! WARNING: The ID felts may be zero. #! #! Inputs: [] -#! Outputs: [foreign_account_id_prefix, foreign_account_id_suffix] +#! Outputs: [foreign_account_id_suffix, foreign_account_id_prefix] #! #! Where: #! - foreign_account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the foreign #! account whose procedure is going to be executed. -pub proc get_fpi_account_id() -> (AccountID) - mem_load.UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR - # => [foreign_account_id_suffix] - +pub proc get_fpi_account_id() -> AccountId mem_load.UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR - # => [foreign_account_id_prefix, foreign_account_id_suffix] + # => [foreign_account_id_prefix] + + mem_load.UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR + # => [foreign_account_id_suffix, foreign_account_id_prefix] end #! 
Sets the root of the foreign procedure which is going to be loaded during the upcoming FPI call. @@ -462,8 +458,8 @@ end #! Where: #! - FOREIGN_PROC_ROOT is the root of the foreign procedure which will be executed during the FPI #! call. -pub proc set_fpi_procedure_root(foreign_proc_root: BeWord) -> BeWord - mem_storew_be.UPCOMING_FOREIGN_PROCEDURE_PTR +pub proc set_fpi_procedure_root(foreign_proc_root: word) -> word + mem_storew_le.UPCOMING_FOREIGN_PROCEDURE_PTR end # GLOBAL INPUTS @@ -477,7 +473,7 @@ end #! Where: #! - BLOCK_COMMITMENT is the commitment of the transaction reference block. pub proc set_block_commitment - mem_storew_be.BLOCK_COMMITMENT_PTR + mem_storew_le.BLOCK_COMMITMENT_PTR end #! Returns the block commitment of the reference block. @@ -488,33 +484,33 @@ end #! Where: #! - BLOCK_COMMITMENT is the commitment of the transaction reference block. pub proc get_block_commitment - padw mem_loadw_be.BLOCK_COMMITMENT_PTR + padw mem_loadw_le.BLOCK_COMMITMENT_PTR end #! Sets the global ID of the native account. #! -#! Inputs: [account_id_prefix, account_id_suffix] +#! Inputs: [account_id_suffix, account_id_prefix] #! Outputs: [] #! #! Where: #! - account_id_{prefix,suffix} are the prefix and suffix felts of the account ID. pub proc set_global_account_id - mem_store.GLOBAL_ACCOUNT_ID_PREFIX_PTR mem_store.GLOBAL_ACCOUNT_ID_SUFFIX_PTR + mem_store.GLOBAL_ACCOUNT_ID_PREFIX_PTR # => [] end #! Returns the global ID of the native account. #! #! Inputs: [] -#! Outputs: [account_id_prefix, account_id_suffix] +#! Outputs: [account_id_suffix, account_id_prefix] #! #! Where: #! - account_id_{prefix,suffix} are the prefix and suffix felts of the account ID. pub proc get_global_account_id - mem_load.GLOBAL_ACCOUNT_ID_SUFFIX_PTR mem_load.GLOBAL_ACCOUNT_ID_PREFIX_PTR - # => [account_id_prefix, account_id_suffix] + mem_load.GLOBAL_ACCOUNT_ID_SUFFIX_PTR + # => [account_id_suffix, account_id_prefix] end #! Sets the native account commitment at the beginning of the transaction. 
@@ -525,7 +521,7 @@ end #! Where: #! - INIT_ACCOUNT_COMMITMENT is the initial account commitment. pub proc set_init_account_commitment - mem_storew_be.INIT_ACCOUNT_COMMITMENT_PTR + mem_storew_le.INIT_ACCOUNT_COMMITMENT_PTR end #! Returns the native account commitment at the beginning of the transaction. @@ -536,7 +532,7 @@ end #! Where: #! - INIT_ACCOUNT_COMMITMENT is the initial account commitment. pub proc get_init_account_commitment - padw mem_loadw_be.INIT_ACCOUNT_COMMITMENT_PTR + padw mem_loadw_le.INIT_ACCOUNT_COMMITMENT_PTR end #! Sets the initial account nonce. @@ -569,7 +565,7 @@ end #! Where: #! - INIT_NATIVE_ACCOUNT_VAULT_ROOT is the initial vault root of the native account. pub proc set_init_native_account_vault_root - mem_storew_be.INIT_NATIVE_ACCOUNT_VAULT_ROOT_PTR + mem_storew_le.INIT_NATIVE_ACCOUNT_VAULT_ROOT_PTR end #! Returns the vault root of the native account at the beginning of the transaction. @@ -580,7 +576,7 @@ end #! Where: #! - INIT_NATIVE_ACCOUNT_VAULT_ROOT is the initial vault root of the native account. pub proc get_init_native_account_vault_root - padw mem_loadw_be.INIT_NATIVE_ACCOUNT_VAULT_ROOT_PTR + padw mem_loadw_le.INIT_NATIVE_ACCOUNT_VAULT_ROOT_PTR end #! Returns the memory address of the vault root of the native account at the beginning of the @@ -604,7 +600,7 @@ end #! Where: #! - INIT_NATIVE_ACCOUNT_STORAGE_COMMITMENT is the initial storage commitment of the native account. pub proc set_init_account_storage_commitment - mem_storew_be.INIT_NATIVE_ACCOUNT_STORAGE_COMMITMENT_PTR + mem_storew_le.INIT_NATIVE_ACCOUNT_STORAGE_COMMITMENT_PTR end #! Returns the storage commitment of the native account at the beginning of the transaction. @@ -615,7 +611,7 @@ end #! Where: #! - INIT_NATIVE_ACCOUNT_STORAGE_COMMITMENT is the initial storage commitment of the native account. 
pub proc get_init_account_storage_commitment - padw mem_loadw_be.INIT_NATIVE_ACCOUNT_STORAGE_COMMITMENT_PTR + padw mem_loadw_le.INIT_NATIVE_ACCOUNT_STORAGE_COMMITMENT_PTR end #! Returns the input notes commitment. @@ -628,7 +624,7 @@ end #! Where: #! - INPUT_NOTES_COMMITMENT is the input notes commitment. pub proc get_input_notes_commitment - padw mem_loadw_be.INPUT_NOTES_COMMITMENT_PTR + padw mem_loadw_le.INPUT_NOTES_COMMITMENT_PTR end #! Sets the input notes' commitment. @@ -639,7 +635,7 @@ end #! Where: #! - INPUT_NOTES_COMMITMENT is the notes' commitment. pub proc set_nullifier_commitment - mem_storew_be.INPUT_NOTES_COMMITMENT_PTR + mem_storew_le.INPUT_NOTES_COMMITMENT_PTR end #! Returns the memory address of the transaction script root. @@ -661,7 +657,7 @@ end #! Where: #! - TX_SCRIPT_ROOT is the transaction script root. pub proc set_tx_script_root - mem_storew_be.TX_SCRIPT_ROOT_PTR + mem_storew_le.TX_SCRIPT_ROOT_PTR end #! Returns the transaction script arguments. @@ -673,7 +669,7 @@ end #! - TX_SCRIPT_ARGS is the word of values which could be used directly or could be used to obtain #! some values associated with it from the advice map. pub proc get_tx_script_args - padw mem_loadw_be.TX_SCRIPT_ARGS_PTR + padw mem_loadw_le.TX_SCRIPT_ARGS_PTR end #! Sets the transaction script arguments. @@ -685,7 +681,7 @@ end #! - TX_SCRIPT_ARGS is the word of values which could be used directly or could be used to obtain #! some values associated with it from the advice map. pub proc set_tx_script_args - mem_storew_be.TX_SCRIPT_ARGS_PTR + mem_storew_le.TX_SCRIPT_ARGS_PTR end #! Returns the auth procedure arguments. @@ -696,7 +692,7 @@ end #! Where: #! - AUTH_ARGS is the argument passed to the auth procedure. pub proc get_auth_args - padw mem_loadw_be.AUTH_ARGS_PTR + padw mem_loadw_le.AUTH_ARGS_PTR end #! Sets the auth procedure arguments. @@ -707,7 +703,7 @@ end #! Where: #! - AUTH_ARGS is the argument passed to the auth procedure. 
pub proc set_auth_args - mem_storew_be.AUTH_ARGS_PTR + mem_storew_le.AUTH_ARGS_PTR end # BLOCK DATA @@ -732,7 +728,7 @@ end #! Where: #! - PREV_BLOCK_COMMITMENT_PTR is the block commitment of the transaction reference block. pub proc get_prev_block_commitment - padw mem_loadw_be.PREV_BLOCK_COMMITMENT_PTR + padw mem_loadw_le.PREV_BLOCK_COMMITMENT_PTR end #! Returns the block number of the transaction reference block. @@ -773,15 +769,15 @@ end #! Returns the faucet ID of the native asset as defined in the transaction's reference block. #! #! Inputs: [] -#! Outputs: [native_asset_id_prefix, native_asset_id_suffix] +#! Outputs: [native_asset_id_suffix, native_asset_id_prefix] #! #! Where: #! - native_asset_id_{prefix,suffix} are the prefix and suffix felts of the faucet ID that defines #! the native asset. pub proc get_native_asset_id - mem_load.NATIVE_ASSET_ID_SUFFIX_PTR mem_load.NATIVE_ASSET_ID_PREFIX_PTR - # => [native_asset_id_prefix, native_asset_id_suffix] + mem_load.NATIVE_ASSET_ID_SUFFIX_PTR + # => [native_asset_id_suffix, native_asset_id_prefix] end #! Returns the verification base fee from the transaction's reference block. @@ -804,7 +800,7 @@ end #! Where: #! - CHAIN_COMMITMENT is the chain commitment of the transaction reference block. pub proc get_chain_commitment - padw mem_loadw_be.CHAIN_COMMITMENT_PTR + padw mem_loadw_le.CHAIN_COMMITMENT_PTR end #! Returns the account db root of the transaction reference block. @@ -815,7 +811,7 @@ end #! Where: #! - ACCT_DB_ROOT is the account database root of the transaction reference block. pub proc get_account_db_root - padw mem_loadw_be.ACCT_DB_ROOT_PTR + padw mem_loadw_le.ACCT_DB_ROOT_PTR end #! Returns the nullifier db root of the transaction reference block. @@ -826,7 +822,7 @@ end #! Where: #! - NULLIFIER_ROOT is the nullifier root of the transaction reference block. pub proc get_nullifier_db_root - padw mem_loadw_be.NULLIFIER_ROOT_PTR + padw mem_loadw_le.NULLIFIER_ROOT_PTR end #! 
Returns the tx commitment of the transaction reference block. @@ -837,7 +833,7 @@ end #! Where: #! - TX_COMMITMENT is the tx commitment of the transaction reference block. pub proc get_tx_commitment - padw mem_loadw_be.TX_COMMITMENT_PTR + padw mem_loadw_le.TX_COMMITMENT_PTR end #! Returns the transaction kernel commitment of the transaction reference block. @@ -848,7 +844,7 @@ end #! Where: #! - TX_KERNEL_COMMITMENT is the sequential hash of the kernel procedures. pub proc get_tx_kernel_commitment - padw mem_loadw_be.TX_KERNEL_COMMITMENT_PTR + padw mem_loadw_le.TX_KERNEL_COMMITMENT_PTR end #! Returns the validator key commitment of the transaction reference block. @@ -859,7 +855,7 @@ end #! Where: #! - VALIDATOR_KEY_COMMITMENT is the public key commitment of the transaction reference block. pub proc get_validator_key_commitment - padw mem_loadw_be.VALIDATOR_KEY_COMMITMENT_PTR + padw mem_loadw_le.VALIDATOR_KEY_COMMITMENT_PTR end #! Returns the note root of the transaction reference block. @@ -870,7 +866,7 @@ end #! Where: #! - NOTE_ROOT is the note root of the transaction reference block. pub proc get_note_root - padw mem_loadw_be.NOTE_ROOT_PTR + padw mem_loadw_le.NOTE_ROOT_PTR end #! Sets the note root of the transaction reference block. @@ -881,7 +877,7 @@ end #! Where: #! - NOTE_ROOT is the note root of the transaction reference block. pub proc set_note_root - mem_storew_be.NOTE_ROOT_PTR + mem_storew_le.NOTE_ROOT_PTR end # CHAIN DATA @@ -1096,7 +1092,7 @@ end #! Returns the ID of the active account. #! #! Inputs: [] -#! Outputs: [account_id_prefix, account_id_suffix] +#! Outputs: [account_id_suffix, account_id_prefix] #! #! Where: #! - account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the active account. 
@@ -1104,38 +1100,38 @@ pub proc get_account_id exec.get_active_account_data_ptr # => [active_account_data_ptr] - dup add.ACCT_ID_SUFFIX_OFFSET mem_load - # => [account_id_suffix, active_account_data_ptr] + dup add.ACCT_ID_PREFIX_OFFSET mem_load + # => [account_id_prefix, active_account_data_ptr] - swap add.ACCT_ID_PREFIX_OFFSET mem_load - # => [account_id_prefix, account_id_suffix] + swap add.ACCT_ID_SUFFIX_OFFSET mem_load + # => [account_id_suffix, account_id_prefix] end #! Returns the ID of the native account of the transaction. #! #! Inputs: [] -#! Outputs: [account_id_prefix, account_id_suffix] +#! Outputs: [account_id_suffix, account_id_prefix] #! #! Where: #! - account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the native account #! of the transaction. pub proc get_native_account_id - mem_load.NATIVE_ACCOUNT_ID_SUFFIX_PTR mem_load.NATIVE_ACCOUNT_ID_PREFIX_PTR - # => [account_id_prefix, account_id_suffix] + mem_load.NATIVE_ACCOUNT_ID_SUFFIX_PTR + # => [account_id_suffix, account_id_prefix] end #! Sets the account ID and nonce. #! -#! Inputs: [account_id_prefix, account_id_suffix, 0, nonce] -#! Outputs: [account_id_prefix, account_id_suffix, 0, nonce] +#! Inputs: [nonce, 0, account_id_suffix, account_id_prefix] +#! Outputs: [nonce, 0, account_id_suffix, account_id_prefix] #! #! Where: #! - account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the active account. #! - nonce is the nonce of the active account. pub proc set_account_id_and_nonce exec.get_active_account_data_ptr add.ACCT_ID_AND_NONCE_OFFSET - mem_storew_be + mem_storew_le end #! Returns the nonce of the active account. @@ -1197,7 +1193,7 @@ end pub proc get_account_vault_root padw exec.get_active_account_data_ptr add.ACCT_VAULT_ROOT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the account vault root. @@ -1209,7 +1205,7 @@ end #! - ACCT_VAULT_ROOT is the account vault root to be set. 
pub proc set_account_vault_root exec.get_active_account_data_ptr add.ACCT_VAULT_ROOT_OFFSET - mem_storew_be + mem_storew_le end #! Returns the memory pointer to the initial vault root of the active account. @@ -1254,7 +1250,7 @@ end pub proc get_account_code_commitment padw exec.get_active_account_data_ptr add.ACCT_CODE_COMMITMENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the code commitment of the account. @@ -1266,7 +1262,7 @@ end #! - CODE_COMMITMENT is the code commitment to be set. pub proc set_account_code_commitment exec.get_active_account_data_ptr add.ACCT_CODE_COMMITMENT_OFFSET - mem_storew_be + mem_storew_le end #! Sets the transaction expiration block number. @@ -1361,7 +1357,7 @@ end pub proc get_account_storage_commitment padw exec.get_active_account_data_ptr add.ACCT_STORAGE_COMMITMENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the account storage commitment. @@ -1373,7 +1369,7 @@ end #! - STORAGE_COMMITMENT is the account storage commitment. pub proc set_account_storage_commitment exec.get_active_account_data_ptr add.ACCT_STORAGE_COMMITMENT_OFFSET - mem_storew_be + mem_storew_le end #! Sets the dirty flag for the native account storage commitment. @@ -1504,28 +1500,6 @@ end ### ACCOUNT DELTA ################################################# -#! Returns the link map pointer to the fungible asset vault delta. -#! -#! Inputs: [] -#! Outputs: [account_delta_fungible_asset_ptr] -#! -#! Where: -#! - account_delta_fungible_asset_ptr is the link map pointer to the fungible asset vault delta. -pub proc get_account_delta_fungible_asset_ptr - push.ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR -end - -#! Returns the link map pointer to the non-fungible asset vault delta. -#! -#! Inputs: [] -#! Outputs: [account_delta_non_fungible_asset_ptr] -#! -#! Where: -#! - account_delta_non_fungible_asset_ptr is the link map pointer to the non-fungible asset vault delta. -pub proc get_account_delta_non_fungible_asset_ptr - push.ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR -end - #! 
Returns the link map pointer to the storage map delta of the storage map in the given slot index. #! #! Inputs: [slot_idx] @@ -1602,7 +1576,7 @@ end #! - note_ptr is the input note's the memory address. #! - NOTE_ID is the note's id. pub proc set_input_note_id - mem_storew_be + mem_storew_le end #! Computes a pointer to the memory address at which the nullifier associated a note with `idx` is @@ -1627,7 +1601,7 @@ end #! - idx is the index of the input note. #! - nullifier is the nullifier of the input note. pub proc get_input_note_nullifier - mul.4 padw movup.4 add.INPUT_NOTE_NULLIFIER_SECTION_PTR mem_loadw_be + mul.4 padw movup.4 add.INPUT_NOTE_NULLIFIER_SECTION_PTR mem_loadw_le end #! Returns a pointer to the start of the input note core data segment for the note located at the @@ -1654,7 +1628,7 @@ end pub proc get_input_note_script_root padw movup.4 add.INPUT_NOTE_SCRIPT_ROOT_OFFSET - mem_loadw_be + mem_loadw_le end #! Returns the memory address of the script root of an input note. @@ -1680,7 +1654,7 @@ end pub proc get_input_note_storage_commitment padw movup.4 add.INPUT_NOTE_STORAGE_COMMITMENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Returns the metadata of an input note located at the specified memory address. @@ -1694,7 +1668,7 @@ end pub proc get_input_note_metadata_header padw movup.4 add.INPUT_NOTE_METADATA_HEADER_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the metadata for an input note located at the specified memory address. @@ -1707,7 +1681,7 @@ end #! - NOTE_METADATA_HEADER is the metadata header of the input note. pub proc set_input_note_metadata_header add.INPUT_NOTE_METADATA_HEADER_OFFSET - mem_storew_be + mem_storew_le end #! Returns the attachment of an input note located at the specified memory address. @@ -1721,7 +1695,7 @@ end pub proc get_input_note_attachment padw movup.4 add.INPUT_NOTE_ATTACHMENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the attachment for an input note located at the specified memory address. 
@@ -1734,7 +1708,7 @@ end #! - NOTE_ATTACHMENT is the attachment of the input note. pub proc set_input_note_attachment add.INPUT_NOTE_ATTACHMENT_OFFSET - mem_storew_be + mem_storew_le end #! Returns the note's args. @@ -1748,7 +1722,7 @@ end pub proc get_input_note_args padw movup.4 add.INPUT_NOTE_ARGS_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the note args for an input note located at the specified memory address. @@ -1761,7 +1735,7 @@ end #! - NOTE_ARGS are optional note args of the input note. pub proc set_input_note_args add.INPUT_NOTE_ARGS_OFFSET - mem_storew_be + mem_storew_le end #! Returns the number of inputs of the note located at the specified memory address. @@ -1840,7 +1814,7 @@ end pub proc get_input_note_recipient padw movup.4 add.INPUT_NOTE_RECIPIENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the input note's recipient. @@ -1853,7 +1827,7 @@ end #! - RECIPIENT is the commitment to the note's script, storage and the serial number. pub proc set_input_note_recipient add.INPUT_NOTE_RECIPIENT_OFFSET - mem_storew_be + mem_storew_le end #! Returns the assets commitment for the input note located at the specified memory address. @@ -1867,7 +1841,7 @@ end pub proc get_input_note_assets_commitment padw movup.4 add.INPUT_NOTE_ASSETS_COMMITMENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Returns the serial number for the input note located at the specified memory address. @@ -1881,7 +1855,7 @@ end pub proc get_input_note_serial_num padw movup.4 add.INPUT_NOTE_SERIAL_NUM_OFFSET - mem_loadw_be + mem_loadw_le end # OUTPUT NOTES @@ -1922,7 +1896,7 @@ end pub proc get_output_note_recipient padw movup.4 add.OUTPUT_NOTE_RECIPIENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the output note's recipient. @@ -1935,7 +1909,7 @@ end #! - RECIPIENT is the commitment to the note's script, storage and the serial number. pub proc set_output_note_recipient add.OUTPUT_NOTE_RECIPIENT_OFFSET - mem_storew_be + mem_storew_le end #! 
Returns the output note's metadata. @@ -1951,7 +1925,7 @@ pub proc get_output_note_metadata_header # => [0, 0, 0, 0, note_ptr] movup.4 add.OUTPUT_NOTE_METADATA_HEADER_OFFSET # => [(note_ptr + offset), 0, 0, 0, 0] - mem_loadw_be + mem_loadw_le # => [METADATA_HEADER] end @@ -1965,7 +1939,7 @@ end #! - note_ptr is the memory address at which the output note data begins. pub proc set_output_note_metadata_header add.OUTPUT_NOTE_METADATA_HEADER_OFFSET - mem_storew_be + mem_storew_le end #! Sets the output note's attachment kind and scheme in the metadata header. @@ -1992,7 +1966,7 @@ end pub proc get_output_note_attachment padw movup.4 add.OUTPUT_NOTE_ATTACHMENT_OFFSET - mem_loadw_be + mem_loadw_le # => [ATTACHMENT] end @@ -2006,7 +1980,7 @@ end #! - note_ptr is the memory address at which the output note data begins. pub proc set_output_note_attachment add.OUTPUT_NOTE_ATTACHMENT_OFFSET - mem_storew_be + mem_storew_le dropw end @@ -2043,6 +2017,24 @@ pub proc set_output_note_num_assets mem_store end +#! Increments the number of assets in the output note by 1. +#! +#! Inputs: [note_ptr] +#! Outputs: [] +#! +#! Where: +#! - note_ptr is the memory address at which the output note data begins. +#! +#! Panics if: +#! - the number of assets exceeds the maximum allowed number of assets per note. +pub proc increment_output_note_num_assets + dup exec.get_output_note_num_assets add.1 + # => [num_assets + 1, note_ptr] + + swap exec.set_output_note_num_assets + # => [] +end + #! Returns the dirty flag for the assets commitment. #! #! This binary flag specifies whether the assets commitment stored in the specified note is @@ -2096,7 +2088,7 @@ end pub proc get_output_note_assets_commitment padw movup.4 add.OUTPUT_NOTE_ASSETS_COMMITMENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the output note assets commitment for the output note located at the specified memory @@ -2110,7 +2102,7 @@ end #! - ASSETS_COMMITMENT is the sequential hash of the padded assets of an output note. 
pub proc set_output_note_assets_commitment add.OUTPUT_NOTE_ASSETS_COMMITMENT_OFFSET - mem_storew_be + mem_storew_le end # KERNEL DATA diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/non_fungible_asset.masm b/crates/miden-protocol/asm/kernels/transaction/lib/non_fungible_asset.masm new file mode 100644 index 0000000000..9f825ce733 --- /dev/null +++ b/crates/miden-protocol/asm/kernels/transaction/lib/non_fungible_asset.masm @@ -0,0 +1,92 @@ +use $kernel::account_id +use $kernel::asset + +# ERRORS +# ================================================================================================= + +const ERR_NON_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_NON_FUNGIBLE = "non-fungible asset vault key's account ID must be of type non-fungible faucet" + +const ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN="the origin of the non-fungible asset is not this faucet" + +const ERR_NON_FUNGIBLE_ASSET_ID_SUFFIX_MUST_MATCH_HASH0="the asset ID suffix in a non-fungible asset's vault key must match hash0 of the asset value" + +const ERR_NON_FUNGIBLE_ASSET_ID_PREFIX_MUST_MATCH_HASH1="the asset ID prefix in a non-fungible asset's vault key must match hash1 of the asset value" + +# PROCEDURES +# ================================================================================================= + +#! Validates that a non fungible asset is well formed. +#! +#! The value itself is not validated since any value is valid. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset to validate. +#! - ASSET_VALUE is the value of the asset to validate. +#! +#! Panics if: +#! - the asset key's account ID is not valid. +#! - the asset key's faucet ID is not a non-fungible one. +#! - the asset ID suffix of the key does not match hash0 of the value. +#! - the asset ID prefix of the key does not match hash1 of the value. 
+pub proc validate + exec.validate_key + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix, faucet_id_prefix, hash0, hash1, hash2, hash3] + + # assert that hash0 matches asset_id_suffix and hash1 matches asset_id_prefix + dup.4 dup.1 assert_eq.err=ERR_NON_FUNGIBLE_ASSET_ID_SUFFIX_MUST_MATCH_HASH0 + dup.5 dup.2 assert_eq.err=ERR_NON_FUNGIBLE_ASSET_ID_PREFIX_MUST_MATCH_HASH1 + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix, faucet_id_prefix, ASSET_VALUE] +end + +#! Validates that a non fungible asset's key is well formed. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [ASSET_KEY] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset to validate. +#! +#! Panics if: +#! - the asset key's account ID is not valid. +#! - the asset key's metadata is not valid. +#! - the asset key's faucet ID is not a non-fungible one. +pub proc validate_key + exec.asset::validate_issuer + # => [ASSET_KEY] + + exec.asset::is_non_fungible_asset_key + assert.err=ERR_NON_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_NON_FUNGIBLE + # => [ASSET_KEY] +end + +#! Validates that a non-fungible asset is associated with the provided faucet_id. +#! +#! The value itself is not validated since any value is valid. +#! +#! Inputs: [faucet_id_suffix, faucet_id_prefix, ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - faucet_id_{suffix,prefix} are the suffix and prefix of the faucet's account ID. +#! - ASSET_KEY is the vault key of the asset to validate. 
+pub proc validate_origin + movdn.9 movdn.9 + # => [ASSET_KEY, ASSET_VALUE, faucet_id_suffix, faucet_id_prefix] + + # assert the non-fungible asset key is valid + exec.validate + # => [ASSET_KEY, ASSET_VALUE, faucet_id_suffix, faucet_id_prefix] + + # assert the origin of the asset is the faucet_id provided via the stack + exec.asset::key_to_faucet_id + # => [key_faucet_id_suffix, key_faucet_id_prefix, ASSET_KEY, ASSET_VALUE, faucet_id_suffix, faucet_id_prefix] + + movup.11 movup.11 + # => [faucet_id_suffix, faucet_id_prefix, key_faucet_id_suffix, key_faucet_id_prefix, ASSET_KEY, ASSET_VALUE] + + exec.account_id::is_equal assert.err=ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN + # => [ASSET_KEY, ASSET_VALUE] +end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/note.masm b/crates/miden-protocol/asm/kernels/transaction/lib/note.masm index 8acbc7ab8b..a2fa70b19b 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/note.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/note.masm @@ -1,5 +1,6 @@ -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 +use $kernel::asset::ASSET_SIZE use $kernel::constants::NOTE_MEM_SIZE use $kernel::memory @@ -102,52 +103,20 @@ pub proc compute_output_note_assets_commitment # => [note_data_ptr] # duplicate note pointer and fetch num_assets - dup dup exec.memory::get_output_note_num_assets - # => [num_assets, note_data_ptr, note_data_ptr] - - # calculate the number of pairs of assets (takes ceiling if we have an odd number) - add.1 - u32assert.err=ERR_NOTE_NUM_OF_ASSETS_EXCEED_LIMIT - u32div.2 - # => [num_asset_pairs, note_data_ptr, note_data_ptr] - - # initiate counter for assets - push.0 - # => [asset_counter, num_asset_pairs, note_data_ptr, note_data_ptr] - - # prepare address and stack for reading assets - movup.2 exec.memory::get_output_note_asset_data_ptr padw padw padw - # => [PAD, PAD, PAD, asset_data_ptr, asset_counter, num_asset_pairs, note_data_ptr] - - # check if 
we should loop - dup.14 dup.14 neq - # => [should_loop, PAD, PAD, PAD, asset_data_ptr, asset_counter, num_asset_pairs, note_data_ptr] - - # loop and read assets from memory - while.true - # read assets from memory. - # if this is the last permutation of the loop and we have an odd number of assets then we - # implicitly pad the last word of the hasher rate with zeros by reading from empty memory. - mem_stream exec.rpo256::permute - # => [PERM, PERM, PERM, asset_data_ptr, asset_counter, num_asset_pairs, note_data_ptr] - - # check if we should loop again - movup.13 add.1 dup movdn.14 dup.15 neq - # => [should_loop, PERM, PERM, PERM, asset_data_ptr, asset_counter, num_asset_pairs, - # note_data_ptr] - end - - # extract digest - exec.rpo256::squeeze_digest - # => [ASSETS_COMMITMENT, asset_data_ptr, asset_counter, num_asset_pairs, note_data_ptr] - - # drop accessory variables from stack - movup.4 drop - movup.4 drop - movup.4 drop + dup exec.memory::get_output_note_asset_data_ptr + # => [assets_ptr, note_data_ptr] + + dup.1 exec.memory::get_output_note_num_assets + # => [num_assets, assets_ptr, note_data_ptr] + + # compute the asset_end_ptr + mul.ASSET_SIZE dup.1 add swap + # => [assets_ptr, assets_end_ptr, note_data_ptr] + + exec.poseidon2::hash_double_words # => [ASSETS_COMMITMENT, note_data_ptr] - # save the assets hash to memory + # save the assets commitment to memory dup.4 exec.memory::set_output_note_assets_commitment # => [ASSETS_COMMITMENT, note_data_ptr] @@ -177,24 +146,19 @@ end #! - note_data_ptr is a pointer to the data section of the output note. #! - NOTE_ID is the ID of the output note located at note_data_ptr. 
proc compute_output_note_id - # pad capacity elements of hasher - padw - # => [EMPTY_WORD, note_data_ptr] + # compute assets commitment + dup exec.compute_output_note_assets_commitment + # => [ASSETS_COMMITMENT, note_data_ptr] - # insert output note recipient into the first four elements of the hasher rate dup.4 exec.memory::get_output_note_recipient - # => [RECIPIENT, EMPTY_WORD, note_data_ptr] + # => [RECIPIENT, ASSETS_COMMITMENT, note_data_ptr] - # populate the last four elements of the hasher rate with the output note's asset commitment - dup.8 exec.compute_output_note_assets_commitment - # => [ASSETS_COMMITMENT, RECIPIENT, EMPTY_WORD, note_data_ptr] - - # compute output note commitment (which is also note ID) and extract digest - exec.rpo256::permute exec.rpo256::squeeze_digest + # compute output note ID + exec.poseidon2::merge # => [NOTE_ID, note_data_ptr] # save the output note commitment (note ID) to memory - movup.4 mem_storew_be + movup.4 mem_storew_le # => [NOTE_ID] end @@ -216,52 +180,56 @@ pub proc compute_output_notes_commitment # => [current_index = 0, num_notes] # prepare stack for hashing - padw padw padw - # => [PERM, PERM, PERM, current_index, num_notes] + exec.poseidon2::init_no_padding + # => [RATE0, RATE1, CAPACITY, current_index, num_notes] # starting looping if num_notes != 0 dup.13 neq.0 - # => [should_loop, PERM, PERM, PERM, current_index, num_notes] + # => [should_loop, RATE0, RATE1, CAPACITY, current_index, num_notes] # loop and hash output notes while.true dup.12 exec.memory::get_output_note_ptr - # => [current_note_ptr, PERM, PERM, PERM, current_index, num_notes] + # => [current_note_ptr, RATE0, RATE1, CAPACITY, current_index, num_notes] # compute and save output note ID to memory (this also computes the note's asset commitment) dup exec.compute_output_note_id - # => [NOTE_ID, current_note_ptr, PERM, PERM, PERM, current_index, num_notes] + # => [NOTE_ID, current_note_ptr, RATE0, RATE1, CAPACITY, current_index, num_notes] - dup.4 
exec.memory::get_output_note_metadata_header - # => [NOTE_METADATA_HEADER, NOTE_ID, current_note_ptr, PERM, PERM, PERM, current_index, num_notes] + dup.4 exec.memory::get_output_note_attachment + # => [NOTE_ATTACHMENT, NOTE_ID, current_note_ptr, RATE0, RATE1, CAPACITY, current_index, num_notes] - movup.8 exec.memory::get_output_note_attachment - # => [NOTE_ATTACHMENT, NOTE_METADATA_HEADER, NOTE_ID, current_note_ptr, PERM, PERM, PERM, current_index, num_notes] + movup.8 exec.memory::get_output_note_metadata_header + # => [NOTE_METADATA_HEADER, NOTE_ATTACHMENT, NOTE_ID, RATE0, RATE1, CAPACITY, current_index, num_notes] # compute hash(NOTE_METADATA_HEADER || NOTE_ATTACHMENT) - exec.rpo256::merge - # => [NOTE_METADATA_COMMITMENT, NOTE_ID, current_note_ptr, PERM, PERM, PERM, current_index, num_notes] + exec.poseidon2::merge + # => [NOTE_METADATA_COMMITMENT, NOTE_ID, RATE0, RATE1, CAPACITY, current_index, num_notes] # replace rate words with note ID and metadata commitment swapdw dropw dropw - # => [NOTE_METADATA_COMMITMENT, NOTE_ID, PERM, current_index, num_notes] + # => [NOTE_METADATA_COMMITMENT, NOTE_ID, CAPACITY, current_index, num_notes] + + # move note ID to the top of the stack + swapw + # => [NOTE_ID, NOTE_METADATA_COMMITMENT, CAPACITY, current_index, num_notes] - # permute over (note_id, note_metadata_commitment) - exec.rpo256::permute - # => [PERM, PERM, PERM, current_index, num_notes] + # permute over (NOTE_ID, NOTE_METADATA_COMMITMENT) + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY, current_index, num_notes] # increment current_index movup.12 add.1 movdn.12 - # => [PERM, PERM, PERM, current_index + 1, num_notes] + # => [RATE0, RATE1, CAPACITY, current_index + 1, num_notes] # continue looping if current_index != num_notes dup.13 dup.13 neq - # => [should_loop, PERM, PERM, PERM, current_index + 1, num_notes] + # => [should_loop, RATE0, RATE1, CAPACITY, current_index + 1, num_notes] end - # => [PERM, PERM, PERM, current_index + 1, num_notes] + # => 
[RATE0, RATE1, CAPACITY, current_index + 1, num_notes] # extract digest - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # => [OUTPUT_NOTES_COMMITMENT, current_index + 1, num_notes] # drop accessory variables from stack diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/output_note.masm b/crates/miden-protocol/asm/kernels/transaction/lib/output_note.masm index 40260d0f55..c8a80d5f06 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/output_note.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/output_note.masm @@ -1,13 +1,14 @@ use $kernel::account +use $kernel::asset +use $kernel::callbacks +use $kernel::fungible_asset use $kernel::memory use $kernel::note -use $kernel::asset use $kernel::constants::MAX_OUTPUT_NOTES_PER_TX -use $kernel::util::asset::FUNGIBLE_ASSET_MAX_AMOUNT use $kernel::util::note::ATTACHMENT_KIND_NONE -use $kernel::util::note::ATTACHMENT_KIND_WORD use $kernel::util::note::ATTACHMENT_KIND_ARRAY -use miden::core::mem +use $kernel::asset::ASSET_SIZE +use $kernel::asset::ASSET_VALUE_MEMORY_OFFSET use miden::core::word # CONSTANTS @@ -43,8 +44,6 @@ const ERR_OUTPUT_NOTE_ATTACHMENT_KIND_NONE_MUST_BE_EMPTY_WORD="attachment kind N const ERR_NOTE_INVALID_INDEX="failed to find note at the given index; index must be within [0, num_of_notes]" -const ERR_NOTE_FUNGIBLE_MAX_AMOUNT_EXCEEDED="adding a fungible asset to a note cannot exceed the max_amount of 9223372036854775807" - const ERR_NON_FUNGIBLE_ASSET_ALREADY_EXISTS="non-fungible asset that already exists in the note cannot be added again" const ERR_NOTE_TAG_MUST_BE_U32="the note's tag must fit into a u32 so the 32 most significant bits of the felt must be zero" @@ -57,9 +56,9 @@ const NOTE_BEFORE_CREATED_EVENT=event("miden::protocol::note::before_created") # Event emitted after a new note is created. 
const NOTE_AFTER_CREATED_EVENT=event("miden::protocol::note::after_created") -# Event emitted before an ASSET is added to a note +# Event emitted before an asset is added to a note const NOTE_BEFORE_ADD_ASSET_EVENT=event("miden::protocol::note::before_add_asset") -# Event emitted after an ASSET is added to a note +# Event emitted after an asset is added to a note const NOTE_AFTER_ADD_ASSET_EVENT=event("miden::protocol::note::after_add_asset") # Event emitted before an ATTACHMENT is added to a note @@ -158,15 +157,15 @@ pub proc get_assets_info # word further to make the assets number even (the same way it is done in the # `note::compute_output_note_assets_commitment` procedure) movup.4 exec.memory::get_output_note_asset_data_ptr - # => [assets_data_ptr, ASSETS_COMMITMENT, num_assets] + # => [assets_start_ptr, ASSETS_COMMITMENT, num_assets] - dup dup.6 dup is_odd add - # => [padded_num_assets, assets_data_ptr, assets_data_ptr, ASSETS_COMMITMENT, num_assets] + movdn.4 dup.4 + # => [assets_start_ptr, ASSETS_COMMITMENT, assets_start_ptr, num_assets] - mul.4 add - # => [assets_end_ptr, assets_start_ptr, ASSETS_COMMITMENT, num_assets] + dup.6 mul.ASSET_SIZE add + # => [assets_end_ptr, ASSETS_COMMITMENT, assets_start_ptr, num_assets] - movdn.5 movdn.4 + movdn.5 # => [ASSETS_COMMITMENT, assets_start_ptr, assets_end_ptr, num_assets] # store the assets data to the advice map using ASSETS_COMMITMENT as a key @@ -178,68 +177,64 @@ pub proc get_assets_info # => [ASSETS_COMMITMENT, num_assets] end -#! Adds the ASSET to the note specified by the index. +#! Adds the asset to the note specified by the index. #! -#! Inputs: [note_idx, ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx] #! Outputs: [] #! #! Where: +#! - ASSET_KEY is the vault key of the asset to add. +#! - ASSET_VALUE is the value of the asset to add. #! - note_idx is the index of the note to which the asset is added. -#! - ASSET can be a fungible or non-fungible asset. #! #! Panics if: #! 
- the note index points to a non-existent output note. -#! - the ASSET is malformed (e.g., invalid faucet ID). +#! - the asset key or value are malformed (e.g., invalid faucet ID). #! - the max amount of fungible assets is exceeded. #! - the non-fungible asset already exists in the note. #! - the total number of ASSETs exceeds the maximum of 256. pub proc add_asset # check if the note exists, it must be within [0, num_of_notes] - dup exec.memory::get_num_output_notes lte assert.err=ERR_NOTE_INVALID_INDEX - # => [note_idx, ASSET] + dup.8 exec.memory::get_num_output_notes lte assert.err=ERR_NOTE_INVALID_INDEX + # => [ASSET_KEY, ASSET_VALUE, note_idx] - # get a pointer to the memory address of the note at which the asset will be stored - dup movdn.5 exec.memory::get_output_note_ptr - # => [note_ptr, ASSET, note_idx] + # validate the asset + exec.asset::validate + # => [ASSET_KEY, ASSET_VALUE, note_idx] - # get current num of assets - dup exec.memory::get_output_note_num_assets movdn.5 - # => [note_ptr, ASSET, num_of_assets, note_idx] + # emit event to signal that a new asset is going to be added to the note + emit.NOTE_BEFORE_ADD_ASSET_EVENT + # => [ASSET_KEY, ASSET_VALUE, note_idx] - # validate the ASSET - movdn.4 exec.asset::validate_asset - # => [ASSET, note_ptr, num_of_assets, note_idx] + # prepare the stack for the callback + swapw dupw.1 + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY, note_idx] - # emit event to signal that a new asset is going to be added to the note. 
- emit.NOTE_BEFORE_ADD_ASSET_EVENT - # => [ASSET, note_ptr, num_of_assets, note_idx] + dup.12 movdn.8 + # => [ASSET_KEY, ASSET_VALUE, note_idx, ASSET_KEY, note_idx] - # check if ASSET to add is fungible - exec.asset::is_fungible_asset - # => [is_fungible_asset?, ASSET, note_ptr, num_of_assets, note_idx] + # invoke the callback + exec.callbacks::on_before_asset_added_to_note + swapw + # => [ASSET_KEY, PROCESSED_ASSET_VALUE, note_idx] - if.true - # ASSET to add is fungible - exec.add_fungible_asset - # => [note_ptr, note_idx] - else - # ASSET to add is non-fungible - exec.add_non_fungible_asset - # => [note_ptr, note_idx] - end - # => [note_ptr, note_idx] + movup.8 exec.memory::get_output_note_ptr dup + # => [note_ptr, note_ptr, ASSET_KEY, PROCESSED_ASSET_VALUE] - # update the assets commitment dirty flag to signal that the current assets commitment is not - # valid anymore - push.1 swap exec.memory::set_output_note_dirty_flag - # => [note_idx] + movdn.9 movdn.9 + # => [ASSET_KEY, PROCESSED_ASSET_VALUE, note_ptr, note_ptr] + + # add the asset to the note + exec.add_asset_raw + # => [note_ptr] - # emit event to signal that a new asset was added to the note. + # emit event to signal that a new asset was added to the note emit.NOTE_AFTER_ADD_ASSET_EVENT - # => [note_idx] + # => [note_ptr] - # drop the note index - drop + # update the assets commitment dirty flag to signal that the current assets commitment is not + # valid anymore + push.1 swap exec.memory::set_output_note_dirty_flag # => [] end @@ -338,20 +333,19 @@ pub proc build_metadata_header # Merge note type and sender ID suffix. 
# -------------------------------------------------------------------------------------------- - exec.account::get_id swap + exec.account::get_id # => [sender_id_suffix, sender_id_prefix, tag, note_type] # the lower bits of an account ID suffix are guaranteed to be zero, so we can safely use that # space to encode the note type - movup.3 add swap - # => [sender_id_prefix, sender_id_suffix_and_note_type, tag] + movup.3 add + # => [sender_id_suffix_and_note_type, sender_id_prefix, tag] # Build metadata header. # -------------------------------------------------------------------------------------------- - movup.2 - push.ATTACHMENT_DEFAULT_KIND_AND_SCHEME - # => [attachment_kind_scheme, tag, sender_id_prefix, sender_id_suffix_and_note_type] + push.ATTACHMENT_DEFAULT_KIND_AND_SCHEME movdn.3 + # => [sender_id_suffix_and_note_type, sender_id_prefix, tag, attachment_kind_scheme] # => [NOTE_METADATA_HEADER] end @@ -460,154 +454,124 @@ proc increment_num_output_notes # => [note_idx] end -#! Adds a fungible asset to a note. If the note already holds an asset issued by the same faucet id -#! the two quantities are summed up and the new quantity is stored at the old position in the note. -#! In the other case, the asset is stored at the next available position. -#! Returns the pointer to the note the asset was stored at. +#! Adds the asset to the note specified by the ptr. #! -#! Inputs: [ASSET, note_ptr, num_of_assets, note_idx] -#! Outputs: [note_ptr] +#! This procedure attempts to find an asset with the same key in the note's assets. +#! - If the asset is not found, the asset is appended at the end and the number of assets is +#! incremented. +#! - If the asset is found and the asset is +#! - fungible: the existing asset and the new asset are merged together. +#! - non-fungible: the procedure panics since non-fungible assets cannot be merged. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_ptr] +#! Outputs: [] #! #! Where: -#! 
- ASSET is the fungible asset to be added to the note. +#! - ASSET_KEY is the vault key of the asset to add. +#! - ASSET_VALUE is the value of the asset to add. #! - note_ptr is the pointer to the note the asset will be added to. -#! - num_of_assets is the current number of assets. -#! - note_idx is the index of the note the asset will be added to. #! #! Panics if -#! - the summed amounts exceed the maximum amount of fungible assets. -proc add_fungible_asset - dup.4 exec.memory::get_output_note_asset_data_ptr - # => [asset_ptr, ASSET, note_ptr, num_of_assets, note_idx] +#! - asset is fungible and adding the two asset values would exceed FUNGIBLE_ASSET_MAX_AMOUNT. +#! - asset is non-fungible and the note already contains an asset with the same key. +proc add_asset_raw + dup.8 exec.memory::get_output_note_asset_data_ptr movdn.8 + # => [ASSET_KEY, ASSET_VALUE, asset_ptr, note_ptr] + + # get the number of assets in the output note + dup.9 exec.memory::get_output_note_num_assets + # => [num_assets, ASSET_KEY, ASSET_VALUE, asset_ptr, note_ptr] # compute the pointer at which we should stop iterating - dup dup.7 mul.4 add - # => [end_asset_ptr, asset_ptr, ASSET, note_ptr, num_of_assets, note_idx] + mul.ASSET_SIZE dup.9 add movdn.9 + # => [ASSET_KEY, ASSET_VALUE, asset_ptr, asset_end_ptr, note_ptr] - # reorganize and pad the stack, prepare for the loop - movdn.5 movdn.5 padw dup.9 - # => [asset_ptr, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] + # initialize loop variable is_existing_asset to false + push.0 movdn.8 + # => [ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] - # compute the loop latch - dup dup.10 neq - # => [latch, asset_ptr, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, - # note_idx] + # enter loop if asset_ptr != asset_end_ptr + dup.10 dup.10 neq + # => [has_assets, ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] while.true - mem_loadw_be - 
# => [STORED_ASSET, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] - - dup.4 eq - # => [are_equal, 0, 0, stored_amount, ASSET, end_asset_ptr, asset_ptr, note_ptr, - # num_of_assets, note_idx] - - if.true - # add the asset quantity, we don't overflow here, bc both ASSETs are valid. - movup.2 movup.6 add - # => [updated_amount, 0, 0, faucet_id, 0, 0, end_asset_ptr, asset_ptr, note_ptr, - # num_of_assets, note_idx] - - # check that we don't overflow bc we use lte - dup lte.FUNGIBLE_ASSET_MAX_AMOUNT - assert.err=ERR_NOTE_FUNGIBLE_MAX_AMOUNT_EXCEEDED - # => [updated_amount, 0, 0, faucet_id, 0, 0, end_asset_ptr, asset_ptr, note_ptr, - # num_of_assets, note_idx] - - # prepare stack to store the "updated" ASSET'' with the new quantity - movdn.5 - # => [0, 0, ASSET'', end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] - - # decrease num_of_assets by 1 to offset incrementing it later - movup.9 sub.1 movdn.9 - # => [0, 0, ASSET'', end_asset_ptr, asset_ptr, note_ptr, num_of_assets - 1, note_idx] - - # end the loop we add 0's to the stack to have the correct number of elements - push.0.0 dup.9 push.0 - # => [0, asset_ptr, 0, 0, 0, 0, ASSET'', end_asset_ptr, asset_ptr, note_ptr, - # num_of_assets - 1, note_idx] - else - # => [0, 0, stored_amount, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, - # note_idx] - - # drop ASSETs and increment the asset pointer - movup.2 drop push.0.0 movup.9 add.4 dup movdn.10 - # => [asset_ptr + 4, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr + 4, note_ptr, - # num_of_assets, note_idx] - - # check if we reached the end of the loop - dup dup.10 neq - end - end - # => [asset_ptr, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] - # prepare stack for storing the ASSET - movdn.4 dropw - # => [asset_ptr, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] - - # store the fungible asset, either the combined ASSET or the new ASSET - mem_storew_be dropw drop drop - # => 
[note_ptr, num_of_assets, note_idx] - - # increase the number of assets in the note - swap add.1 dup.1 exec.memory::set_output_note_num_assets - # => [note_ptr, note_idx] -end + padw dup.13 mem_loadw_le + # => [STORED_ASSET_KEY, ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] -#! Adds a non-fungible asset to a note at the next available position. -#! Returns the pointer to the note the asset was stored at. -#! -#! Inputs: [ASSET, note_ptr, num_of_assets, note_idx] -#! Outputs: [note_ptr, note_idx] -#! -#! Where: -#! - ASSET is the non-fungible asset to be added to the note. -#! - note_ptr is the pointer to the note the asset will be added to. -#! - num_of_assets is the current number of assets. -#! - note_idx is the index of the note the asset will be added to. -#! -#! Panics if: -#! - the non-fungible asset already exists in the note. -proc add_non_fungible_asset - dup.4 exec.memory::get_output_note_asset_data_ptr - # => [asset_ptr, ASSET, note_ptr, num_of_assets, note_idx] + exec.word::test_eq + # => [is_matching_key, STORED_ASSET_KEY, ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] - # compute the pointer at which we should stop iterating - dup dup.7 mul.4 add - # => [end_asset_ptr, asset_ptr, ASSET, note_ptr, num_of_assets, note_idx] + # set is_existing_asset = is_matching_key + swap.13 drop dropw + # => [ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] - # reorganize and pad the stack, prepare for the loop - movdn.5 movdn.5 padw dup.9 - # => [asset_ptr, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] + # if is_existing_asset, increment asset_ptr by 0 (so the ptr points to the existing asset + # after the loop) + # if !is_existing_asset, increment asset_ptr by ASSET_SIZE + dup.8 not mul.ASSET_SIZE + # => [asset_size_or_0, ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] - # compute the loop latch - dup dup.10 
neq - # => [latch, asset_ptr, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, - # note_idx] + # compute asset_ptr + asset_size_or_0 + movup.10 add movdn.9 + # => [ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] - while.true - # load the asset and compare - mem_loadw_be exec.word::test_eq - assertz.err=ERR_NON_FUNGIBLE_ASSET_ALREADY_EXISTS - # => [ASSET', ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] - - # drop ASSET' and increment the asset pointer - dropw movup.5 add.4 dup movdn.6 padw movup.4 - # => [asset_ptr + 4, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr + 4, note_ptr, - # num_of_assets, note_idx] - - # check if we reached the end of the loop - dup dup.10 neq + # continue looping if (!is_existing_asset) && asset_ptr != asset_end_ptr + dup.10 dup.10 neq + # => [is_end_reached, ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] + + dup.9 not and + # => [should_loop, ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] end - # => [asset_ptr, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] + # => [ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] + + # after the loop: + # if is_existing_asset: asset_ptr points to the entry where the existing asset is stored + # if !is_existing_asset: asset_ptr points to the entry where the asset can be appended, + # i.e. 
asset_ptr = asset_end_ptr - # prepare stack for storing the ASSET - movdn.4 dropw - # => [asset_ptr, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] + # discard asset end ptr + movup.10 drop + # => [ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, note_ptr] - # end of the loop reached, no error so we can store the non-fungible asset - mem_storew_be dropw drop drop - # => [note_ptr, num_of_assets, note_idx] + movup.8 + # => [is_existing_asset, ASSET_KEY, ASSET_VALUE, asset_ptr, note_ptr] - # increase the number of assets in the note - swap add.1 dup.1 exec.memory::set_output_note_num_assets - # => [note_ptr, note_idx] + if.true + # if the asset exists, do not increment num assets + + # abort if the asset is non-fungible since it cannot be merged + exec.asset::is_fungible_asset_key + assert.err=ERR_NON_FUNGIBLE_ASSET_ALREADY_EXISTS + # => [ASSET_KEY, ASSET_VALUE, asset_ptr, note_ptr] + + # if the asset is fungible, merge the asset values + # overwrite asset key on the stack with the stored asset value + # note that asset_ptr already stores ASSET_KEY so there is no need to overwrite it + dup.8 add.ASSET_VALUE_MEMORY_OFFSET mem_loadw_le + # => [STORED_ASSET_VALUE, ASSET_VALUE, asset_ptr, note_ptr] + + # merge the two fungible assets + exec.fungible_asset::merge + # => [MERGED_ASSET_VALUE, asset_ptr, note_ptr] + + # store the merged asset value + movup.4 add.ASSET_VALUE_MEMORY_OFFSET mem_storew_le dropw drop + # => [] + else + # if the asset does not exist, increment num assets and append the asset + + # increment number of assets + # this panics if the max allowed number of assets is exceeded + # this implicitly validates that asset_ptr is not out of bounds + movup.9 exec.memory::increment_output_note_num_assets + # => [ASSET_KEY, ASSET_VALUE, asset_ptr] + + # store ASSET_KEY + dup.8 mem_storew_le dropw + # => [ASSET_VALUE, asset_ptr] + + # store ASSET_VALUE + movup.4 add.ASSET_VALUE_MEMORY_OFFSET mem_storew_le dropw + # => [] + end end diff 
--git a/crates/miden-protocol/asm/kernels/transaction/lib/prologue.masm b/crates/miden-protocol/asm/kernels/transaction/lib/prologue.masm index a565cd57a2..9381fb6359 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/prologue.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/prologue.masm @@ -1,19 +1,18 @@ use miden::core::mem use miden::core::collections::mmr -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 use miden::core::word use $kernel::account -use $kernel::account_delta use $kernel::account_id use $kernel::asset_vault +use $kernel::asset::ASSET_SIZE +use $kernel::asset use $kernel::constants::EMPTY_SMT_ROOT use $kernel::constants::MAX_ASSETS_PER_NOTE use $kernel::constants::MAX_INPUT_NOTES_PER_TX use $kernel::constants::MAX_NOTE_STORAGE_ITEMS use $kernel::constants::NOTE_TREE_DEPTH -use $kernel::constants::STORAGE_SLOT_TYPE_MAP -use $kernel::constants::STORAGE_SLOT_TYPE_VALUE use $kernel::memory # CONSTS @@ -145,7 +144,7 @@ proc process_kernel_data # AS => [] # extract the resulting hash - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # OS => [SEQ_KERNEL_PROC_HASH, kernel_procs_ptr', TX_KERNEL_COMMITMENT] # AS => [] @@ -201,22 +200,25 @@ proc process_block_data # read block data and compute its sub commitment # see `Advice stack` above for details. 
- padw padw padw - adv_pipe exec.rpo256::permute - adv_pipe exec.rpo256::permute - adv_pipe exec.rpo256::permute - adv_pipe exec.rpo256::permute - adv_pipe exec.rpo256::permute - exec.rpo256::squeeze_digest + exec.poseidon2::init_no_padding + adv_pipe exec.poseidon2::permute + adv_pipe exec.poseidon2::permute + adv_pipe exec.poseidon2::permute + adv_pipe exec.poseidon2::permute + adv_pipe exec.poseidon2::permute + exec.poseidon2::squeeze_digest # => [SUB_COMMITMENT, block_data_ptr', block_num] - # store the note root in memory - padw adv_loadw - dupw exec.memory::set_note_root dropw + # load and store the note root + padw adv_loadw exec.memory::set_note_root # => [NOTE_ROOT, SUB_COMMITMENT, block_data_ptr', block_num] + # move the note root to the top of the stack for merging + swapw + # => [SUB_COMMITMENT, NOTE_ROOT, block_data_ptr', block_num] + # merge the note root with the sub commitment to get the block commitment - exec.rpo256::merge + exec.poseidon2::merge # => [BLOCK_COMMITMENT, block_data_ptr', block_num] # assert that the block commitment matches the commitment in global inputs @@ -363,10 +365,10 @@ proc process_account_data # => [acct_data_ptr] # read account details and compute its digest. See `Advice stack` above for details. 
- padw padw padw - adv_pipe exec.rpo256::permute - adv_pipe exec.rpo256::permute - exec.rpo256::squeeze_digest + exec.poseidon2::init_no_padding + adv_pipe exec.poseidon2::permute + adv_pipe exec.poseidon2::permute + exec.poseidon2::squeeze_digest # => [ACCOUNT_COMMITMENT, acct_data_ptr'] movup.4 drop @@ -490,23 +492,21 @@ proc authenticate_note # --------------------------------------------------------------------------------------------- # read data from advice and compute hash(BLOCK_SUB_COMMITMENT || NOTE_ROOT) - padw padw padw - adv_pipe exec.rpo256::permute - # => [PERM, COMPUTED_BLOCK_COMMITMENT, PERM, mem_ptr', BLOCK_COMMITMENT, NOTE_COMMITMENT] - - dropw - # => [COMPUTED_BLOCK_COMMITMENT, PERM, mem_ptr', BLOCK_COMMITMENT, NOTE_COMMITMENT] + exec.poseidon2::init_no_padding + adv_pipe exec.poseidon2::permute + exec.poseidon2::squeeze_digest + # => [COMPUTED_BLOCK_COMMITMENT, mem_ptr', BLOCK_COMMITMENT, NOTE_COMMITMENT] # assert the computed block commitment matches - movup.8 drop movupw.2 + movup.4 drop assert_eqw.err=ERR_PROLOGUE_MISMATCH_OF_REFERENCE_BLOCK_MMR_AND_NOTE_AUTHENTICATION_MMR - # => [PERM, NOTE_COMMITMENT] + # => [NOTE_COMMITMENT] # Authenticate the NOTE_COMMITMENT # --------------------------------------------------------------------------------------------- # load the note root from memory - loc_loadw_be.4 swapw + padw loc_loadw_le.4 swapw # => [NOTE_COMMITMENT, NOTE_ROOT] # load the index of the note @@ -552,10 +552,10 @@ proc process_input_note_details # => [note_data_ptr] # read input note's data and compute its digest. See `Advice stack` above for details. - padw padw padw - adv_pipe exec.rpo256::permute - adv_pipe exec.rpo256::permute - exec.rpo256::squeeze_digest + exec.poseidon2::init_no_padding + adv_pipe exec.poseidon2::permute + adv_pipe exec.poseidon2::permute + exec.poseidon2::squeeze_digest # => [NULLIFIER, note_data_ptr + 16] movup.4 drop @@ -577,24 +577,25 @@ end #! #! Inputs: #! Operand stack: [note_ptr] -#! 
Advice stack: [NOTE_ARGS, NOTE_METADATA_HEADER, NOTE_ATTACHMENT] +#! Advice stack: [NOTE_ARGS, NOTE_ATTACHMENT, NOTE_METADATA_HEADER] #! Outputs: -#! Operand stack: [NOTE_ATTACHMENT, NOTE_METADATA_HEADER] +#! Operand stack: [NOTE_METADATA_HEADER, NOTE_ATTACHMENT] #! Advice stack: [] #! #! Where: #! - note_ptr is the memory location for the input note. #! - NOTE_ARGS are the user arguments passed to the note. -#! - NOTE_METADATA_HEADER is the note's metadata. +#! - NOTE_METADATA_HEADER is the note's metadata header. +#! - NOTE_ATTACHMENT is the note's attachment. proc process_note_args_and_metadata padw adv_loadw dup.4 exec.memory::set_input_note_args dropw # => [note_ptr] - padw adv_loadw dup.4 exec.memory::set_input_note_metadata_header - # => [NOTE_METADATA_HEADER, note_ptr] + padw adv_loadw dup.4 exec.memory::set_input_note_attachment + # => [NOTE_ATTACHMENT] - padw adv_loadw movup.8 exec.memory::set_input_note_attachment - # => [NOTE_ATTACHMENT, NOTE_METADATA_HEADER] + padw adv_loadw movup.8 exec.memory::set_input_note_metadata_header + # => [NOTE_METADATA_HEADER, NOTE_ATTACHMENT, note_ptr] end #! Checks that the number of note storage is within limit and stores it to memory. @@ -628,83 +629,51 @@ end #! #! Inputs: #! Operand stack: [note_ptr] -#! Advice stack: [assets_count, ASSET_0, ..., ASSET_N] +#! Advice stack: [num_assets, ASSET_KEY_0, ASSET_VALUE_0, ..., ASSET_KEY_N, ASSET_VALUE_N] #! Outputs: #! Operand stack: [] #! Advice stack: [] #! #! Where: #! - note_ptr is the memory location for the input note. -#! - assets_count is the note's assets count. -#! - ASSET_0, ..., ASSET_N are the padded note's assets. +#! - num_assets is the number of note assets. +#! - ASSET_KEY_0, ASSET_VALUE_0, ..., ASSET_KEY_N, ASSET_VALUE_N are the note's assets. proc process_note_assets - # verify and save the assets count + # Validate num_assets and setup commitment computation. 
# --------------------------------------------------------------------------------------------- adv_push.1 - # => [assets_count, note_ptr] + # => [num_assets, note_ptr] - dup push.MAX_ASSETS_PER_NOTE lte + dup lte.MAX_ASSETS_PER_NOTE assert.err=ERR_PROLOGUE_NUMBER_OF_NOTE_ASSETS_EXCEEDS_LIMIT - # => [assets_count, note_ptr] + # => [num_assets, note_ptr] dup dup.2 exec.memory::set_input_note_num_assets - # => [assets_count, note_ptr] - - # round up the number of assets, to the its padded length - dup u32and.1 add - # => [rounded_num_assets, note_ptr] - - # read the note's assets - # --------------------------------------------------------------------------------------------- - - # Stack organization: - # - Top of the stack contains the hash state. The complete state is needed to extract the final - # hash. - # - Followed by the assets_ptr, with the target address used to pipe data from the advice - # provider. - # - Followed by a copy of the note_ptr for later use. - # - Followed by the loop variables, the current counter and rounded_num_assets, laid at this - # depth because dup.15 is an efficient operation. - - push.0 movup.2 - # => [note_ptr, counter, rounded_num_assets] - - dup exec.memory::get_input_note_assets_ptr - # => [assets_ptr, note_ptr, counter, rounded_num_assets] + # => [num_assets, note_ptr] - padw padw padw - # => [PERM, PERM, PERM, assets_ptr, note_ptr, counter, rounded_num_assets] + dup.1 exec.memory::get_input_note_assets_ptr swap + # => [num_assets, assets_ptr, note_ptr] - # loop condition: counter != rounded_num_assets - dup.15 dup.15 neq - # => [should_loop, PERM, PERM, PERM, assets_ptr, note_ptr, counter, rounded_num_assets] + mul.ASSET_SIZE dup.1 add swap + # => [assets_ptr, assets_end_ptr, note_ptr] - # loop and read assets from the advice provider - while.true - # read data and compute its digest. See `Advice stack` above for details. 
- adv_pipe exec.rpo256::permute - # => [PERM, PERM, PERM, assets_ptr+8, note_ptr, counter, rounded_num_assets] - - # update counter - swapw.3 movup.2 add.2 movdn.2 swapw.3 - # => [PERM, PERM, PERM, assets_ptr+8, note_ptr, counter+2, rounded_num_assets] - - # loop condition: counter != rounded_num_assets - dup.15 dup.15 neq - # => [should_loop, PERM, PERM, PERM, assets_ptr+8, note_ptr, counter+2, rounded_num_assets] - end - # => [PERM, PERM, PERM, assets_ptr+8n, note_ptr, counter+2n, rounded_num_assets] + exec.poseidon2::init_no_padding + # => [RATE0, RATE1, CAPACITY, assets_ptr, assets_end_ptr, note_ptr] - exec.rpo256::squeeze_digest - # => [ASSET_COMMITMENT_COMPUTED, assets_ptr+8n, note_ptr, counter+2n, rounded_num_assets] + # Compute assets commitment and validate. + # --------------------------------------------------------------------------------------------- - swapw drop movdn.2 drop drop - # => [note_ptr, ASSET_COMMITMENT_COMPUTED] + exec.mem::pipe_double_words_to_memory + exec.poseidon2::squeeze_digest + # => [COMPUTED_ASSETS_COMMITMENT, assets_ptr, note_ptr] - # VERIFY: computed ASSET_COMMITMENT matches the provided hash - exec.memory::get_input_note_assets_commitment + # assert COMPUTED_ASSETS_COMMITMENT matches the provided commitment + movup.6 exec.memory::get_input_note_assets_commitment assert_eqw.err=ERR_PROLOGUE_PROVIDED_INPUT_ASSETS_INFO_DOES_NOT_MATCH_ITS_COMMITMENT + # => [assets_ptr] + + drop # => [] end @@ -725,7 +694,7 @@ proc add_input_note_assets_to_vault dup.1 exec.memory::get_input_note_assets_ptr # => [assets_start_ptr, input_vault_root_ptr, note_ptr] - dup movup.3 exec.memory::get_input_note_num_assets mul.4 add swap + dup movup.3 exec.memory::get_input_note_num_assets mul.ASSET_SIZE add swap # => [assets_start_ptr, assets_end_ptr, input_vault_root_ptr] # add input note's assets to input vault @@ -739,20 +708,21 @@ proc add_input_note_assets_to_vault dup.2 # => [input_vault_root_ptr, assets_start_ptr, assets_end_ptr, input_vault_root_ptr] 
- padw dup.5 mem_loadw_be - # => [ASSET, input_vault_root_ptr, assets_start_ptr, assets_end_ptr, input_vault_root_ptr] + # load asset key and value + dup.1 exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, input_vault_root_ptr, assets_start_ptr, assets_end_ptr, input_vault_root_ptr] # the witnesses for the note assets should be added prior to transaction execution and so # there should be no need to fetch them lazily via an event. exec.asset_vault::add_asset dropw # => [assets_start_ptr, assets_end_ptr, input_vault_root_ptr] - add.4 - # => [assets_start_ptr+4, assets_end_ptr, input_vault_root_ptr] + add.ASSET_SIZE + # => [assets_start_ptr+ASSET_SIZE, assets_end_ptr, input_vault_root_ptr] # loop condition: assets_start_ptr != assets_end_ptr dup.1 dup.1 neq - # => [should_loop, assets_start_ptr+4, assets_end_ptr, input_vault_root_ptr] + # => [should_loop, assets_start_ptr+ASSET_SIZE, assets_end_ptr, input_vault_root_ptr] end drop drop drop @@ -766,26 +736,38 @@ end #! #! Where: #! - note_ptr is the memory location for the input note. -#! - NOTE_ID is the note's id, i.e. `hash(RECIPIENT || ASSET_COMMITMENT)`. +#! - NOTE_ID is the note's id, i.e. `hash(RECIPIENT || ASSETS_COMMITMENT)`. 
proc compute_input_note_id + # load all inputs on the stack + dup exec.memory::get_input_note_assets_commitment + dup.4 exec.memory::get_input_note_storage_commitment + dup.8 exec.memory::get_input_note_script_root + # => [SCRIPT_ROOT, STORAGE_COMMITMENT, ASSETS_COMMITMENT, note_ptr] + + dup.12 padw + # => [EMPTY_WORD, note_ptr, SCRIPT_ROOT, STORAGE_COMMITMENT, ASSETS_COMMITMENT, note_ptr] + + movup.4 exec.memory::get_input_note_serial_num + # => [SERIAL_NUM, EMPTY_WORD, SCRIPT_ROOT, STORAGE_COMMITMENT, ASSETS_COMMITMENT, note_ptr] + # compute SERIAL_COMMITMENT: hash(SERIAL_NUMBER || EMPTY_WORD) - dup exec.memory::get_input_note_serial_num padw exec.rpo256::merge - # => [SERIAL_COMMITMENT, note_ptr] + exec.poseidon2::merge + # => [SERIAL_COMMITMENT, SCRIPT_ROOT, STORAGE_COMMITMENT, ASSETS_COMMITMENT, note_ptr] - # compute MERGE_SCRIPT: hash(SERIAL_COMMITMENT || SCRIPT_ROOT) - dup.4 exec.memory::get_input_note_script_root exec.rpo256::merge - # => [MERGE_SCRIPT, note_ptr] + # compute SERIAL_SCRIPT_COMMITMENT: hash(SERIAL_COMMITMENT || SCRIPT_ROOT) + exec.poseidon2::merge + # => [SERIAL_SCRIPT_COMMITMENT, STORAGE_COMMITMENT, ASSETS_COMMITMENT, note_ptr] - # compute RECIPIENT: hash(MERGE_SCRIPT || STORAGE_COMMITMENT) - dup.4 exec.memory::get_input_note_storage_commitment exec.rpo256::merge - # => [RECIPIENT, note_ptr] + # compute RECIPIENT: hash(SERIAL_SCRIPT_COMMITMENT || STORAGE_COMMITMENT) + exec.poseidon2::merge + # => [RECIPIENT, ASSETS_COMMITMENT, note_ptr] # store the recipient in memory - dup.4 exec.memory::set_input_note_recipient - # => [RECIPIENT, note_ptr] + movup.8 exec.memory::set_input_note_recipient + # => [RECIPIENT, ASSETS_COMMITMENT, note_ptr] - # compute NOTE_ID: hash(RECIPIENT || ASSET_COMMITMENT) - movup.4 exec.memory::get_input_note_assets_commitment exec.rpo256::merge + # compute NOTE_ID: hash(RECIPIENT || ASSETS_COMMITMENT) + exec.poseidon2::merge # => [NOTE_ID] end @@ -797,16 +779,16 @@ end #! commitment can be extracted. #! #! Inputs: -#! 
Operand stack: [idx, HASHER_CAPACITY] +#! Operand stack: [idx, CAPACITY] #! Advice stack: [ #! SERIAL_NUMBER, #! SCRIPT_ROOT, #! STORAGE_COMMITMENT, #! ASSETS_COMMITMENT, #! NOTE_ARGS, -#! NOTE_METADATA_HEADER, #! NOTE_ATTACHMENT, -#! assets_count, +#! NOTE_METADATA_HEADER, +#! num_assets, #! ASSET_0, ..., ASSET_N, #! is_authenticated, #! ( @@ -816,13 +798,12 @@ end #! )? #! ] #! Outputs: -#! Operand stack: [PERM, PERM, PERM] +#! Operand stack: [RATE0, RATE1, CAPACITY] #! Advice stack: [] #! #! Where: #! - idx is the index of the input note. -#! - HASHER_CAPACITY is the state of the hasher capacity word, with the commitment to the previous -#! notes. +#! - CAPACITY is the state of the hasher capacity word, with the commitment to the previous notes. #! - SERIAL_NUMBER is the note's serial. #! - SCRIPT_ROOT is the note's script root. #! - STORAGE_COMMITMENT is the sequential hash of the padded note's storage. @@ -830,7 +811,7 @@ end #! - NOTE_METADATA_HEADER is the note's metadata header. #! - NOTE_ATTACHMENT is the note's attachment. #! - NOTE_ARGS are the user arguments passed to the note. -#! - assets_count is the note's assets count. +#! - num_assets is the number of note assets. #! - ASSET_0, ..., ASSET_N are the padded note's assets. #! - is_authenticated is the boolean indicating if the note contains an authentication proof. #! 
- optional values, required if `is_authenticated` is true: @@ -842,78 +823,81 @@ proc process_input_note # --------------------------------------------------------------------------------------------- dup exec.memory::get_input_note_ptr dup - # => [note_ptr, note_ptr, idx, HASHER_CAPACITY] + # => [note_ptr, note_ptr, idx, CAPACITY] exec.process_input_note_details - # => [NULLIFIER, note_ptr, idx, HASHER_CAPACITY] + # => [NULLIFIER, note_ptr, idx, CAPACITY] # save NULLIFIER to memory - movup.5 exec.memory::get_input_note_nullifier_ptr mem_storew_be - # => [NULLIFIER, note_ptr, HASHER_CAPACITY] + movup.5 exec.memory::get_input_note_nullifier_ptr mem_storew_le + # => [NULLIFIER, note_ptr, CAPACITY] # note metadata & args # --------------------------------------------------------------------------------------------- movup.4 - # => [note_ptr, NULLIFIER, HASHER_CAPACITY] + # => [note_ptr, NULLIFIER, CAPACITY] dup exec.process_note_args_and_metadata - # => [NOTE_ATTACHMENT, NOTE_METADATA_HEADER, note_ptr, NULLIFIER, HASHER_CAPACITY] + # => [NOTE_METADATA_HEADER, NOTE_ATTACHMENT, note_ptr, NULLIFIER, CAPACITY] # compute hash(NOTE_METADATA_HEADER || NOTE_ATTACHMENT) - exec.rpo256::merge - # => [NOTE_METADATA_COMMITMENT, note_ptr, NULLIFIER, HASHER_CAPACITY] + exec.poseidon2::merge + # => [NOTE_METADATA_COMMITMENT, note_ptr, NULLIFIER, CAPACITY] movup.4 - # => [note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, CAPACITY] # note number of storage items # --------------------------------------------------------------------------------------------- exec.process_note_num_storage_items - # => [note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, CAPACITY] # note assets # --------------------------------------------------------------------------------------------- dup exec.process_note_assets dup exec.add_input_note_assets_to_vault - # => [note_ptr, 
NOTE_METADATA_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, CAPACITY] # note id # --------------------------------------------------------------------------------------------- dup exec.compute_input_note_id - # => [NOTE_ID, note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [NOTE_ID, note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, CAPACITY] # save note id to memory movup.4 exec.memory::set_input_note_id - # => [NOTE_ID, NOTE_METADATA_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [NOTE_ID, NOTE_METADATA_COMMITMENT, NULLIFIER, CAPACITY] # note authentication # --------------------------------------------------------------------------------------------- # NOTE_COMMITMENT: `hash(NOTE_ID || NOTE_METADATA_COMMITMENT)` - swapw exec.rpo256::merge - # => [NOTE_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + exec.poseidon2::merge + # => [NOTE_COMMITMENT, NULLIFIER, CAPACITY] adv_push.1 - # => [is_authenticated, NOTE_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [is_authenticated, NOTE_COMMITMENT, NULLIFIER, CAPACITY] if.true - # => [NOTE_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [NOTE_COMMITMENT, NULLIFIER, CAPACITY] exec.authenticate_note - # => [NULLIFIER, HASHER_CAPACITY] + # => [NULLIFIER, CAPACITY] padw - # => [EMPTY_WORD, NULLIFIER, HASHER_CAPACITY] + # => [EMPTY_WORD, NULLIFIER, CAPACITY] end - # => [EMPTY_WORD_OR_NOTE_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [EMPTY_WORD_OR_NOTE_COMMITMENT, NULLIFIER, CAPACITY] + + swapw + # => [NULLIFIER, EMPTY_WORD_OR_NOTE_COMMITMENT, CAPACITY] - # update the input note commitment - exec.rpo256::permute - # => [PERM, PERM, PERM] + # update the input notes commitment with hash(NULLIFIER || EMPTY_WORD_OR_NOTE_COMMITMENT) + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] end #! Process the input notes data provided via the advice provider. 
This involves reading the data @@ -978,12 +962,14 @@ proc process_input_notes_data # - The [idx, num_notes] pair is kept in a word boundary, so that its word can be swapped with a # cheap swapw.3 instruction to update the `idx` counter. - push.0 padw padw padw - # => [PERM, PERM, PERM, idx, num_notes] + # start at note index 0 + push.0 + exec.poseidon2::init_no_padding + # => [RATE0, RATE1, CAPACITY, idx, num_notes] # loop condition: idx != num_notes dup.13 dup.13 neq - # => [has_more_notes, PERM, PERM, PERM, idx, num_notes] + # => [has_more_notes, RATE0, RATE1, CAPACITY, idx, num_notes] while.true # the hasher operates in overwrite mode, so discard the rate words, and keep the capacity @@ -992,18 +978,18 @@ proc process_input_notes_data # process the note dup.4 exec.process_input_note - # => [PERM, PERM, PERM, idx, num_notes] + # => [RATE0, RATE1, CAPACITY, idx, num_notes] # update the idx counter swapw.3 add.1 swapw.3 - # => [PERM, PERM, PERM, idx+1, num_notes] + # => [RATE0, RATE1, CAPACITY, idx+1, num_notes] # loop condition: idx != num_notes dup.13 dup.13 neq - # => [has_more_notes, PERM, PERM, PERM, idx+1, num_notes] + # => [has_more_notes, RATE0, RATE1, CAPACITY, idx+1, num_notes] end - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # => [INPUT_NOTES_COMMITMENT, idx+1, num_notes] # assert the input notes and the commitment matches diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/tx.masm b/crates/miden-protocol/asm/kernels/transaction/lib/tx.masm index a10c15791a..aaed2d01fc 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/tx.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/tx.masm @@ -152,9 +152,9 @@ end #! account stack, which makes the foreign account the active account. #! #! Inputs: -#! Operand stack: [foreign_account_id_prefix, foreign_account_id_suffix] +#! Operand stack: [foreign_account_id_suffix, foreign_account_id_prefix] #! Advice map: { -#! 
FOREIGN_ACCOUNT_ID: [[foreign_account_id_suffix, foreign_account_id_prefix, 0, account_nonce], +#! FOREIGN_ACCOUNT_ID_KEY: [[foreign_account_id_suffix, foreign_account_id_prefix, 0, account_nonce], #! VAULT_ROOT, STORAGE_ROOT, CODE_ROOT], #! STORAGE_ROOT: [[STORAGE_SLOT_DATA]], #! CODE_ROOT: [num_procs, [ACCOUNT_PROCEDURE_DATA]] @@ -165,11 +165,8 @@ end #! Where: #! - foreign_account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the foreign #! account whose procedure is going to be executed. -#! - FOREIGN_ACCOUNT_ID is the word constructed from the foreign_account_id as follows: -#! [0, 0, foreign_account_id_prefix, foreign_account_id_suffix]. Notice that the actual advice map -#! key is reversed: [foreign_account_id_suffix, foreign_account_id_prefix, 0, 0]. That is the -#! specificity of the `adv.push_mapval` instruction which takes the top stack word in the -#! big-endian (reversed) order. +#! - FOREIGN_ACCOUNT_ID_KEY is the map key constructed from the foreign_account_id as done by +#! account::create_id_key. #! - account_nonce is the nonce of the foreign account. #! - VAULT_ROOT is the commitment of the foreign account's vault. #! - STORAGE_ROOT is the commitment of the foreign account's storage. @@ -185,14 +182,14 @@ end pub proc start_foreign_context # get the memory address and a flag whether this account was already loaded. exec.account::get_account_data_ptr - # OS => [was_loaded, ptr, foreign_account_id_prefix, foreign_account_id_suffix] + # OS => [was_loaded, ptr, foreign_account_id_suffix, foreign_account_id_prefix] if.true exec.memory::push_ptr_to_account_stack drop drop # OS => [] else exec.memory::push_ptr_to_account_stack - # OS => [foreign_account_id_prefix, foreign_account_id_suffix] + # OS => [foreign_account_id_suffix, foreign_account_id_prefix] # load the advice data into the active account memory section exec.account::load_foreign_account @@ -208,9 +205,7 @@ end #! Ends a foreign account context. #! -#! 
This pops the top of the account stack, making the previous account the active account, and -#! resets the foreign procedure info (foreign account ID and foreign procedure root) in the kernel -#! memory. +#! This pops the top of the account stack, making the previous account the active account. #! #! Inputs: [] #! Outputs: [] @@ -222,7 +217,14 @@ end pub proc end_foreign_context exec.memory::pop_ptr_from_account_stack # => [] +end +#! Resets the foreign procedure info (foreign account ID and foreign procedure root) in the kernel +#! memory to zeros. +#! +#! Inputs: [] +#! Outputs: [] +proc clear_fpi_memory # set the upcoming foreign account ID to zero push.0 push.0 exec.memory::set_fpi_account_id # => [] diff --git a/crates/miden-protocol/asm/kernels/transaction/main.masm b/crates/miden-protocol/asm/kernels/transaction/main.masm index fe0c0d0383..e0f0313ebb 100644 --- a/crates/miden-protocol/asm/kernels/transaction/main.masm +++ b/crates/miden-protocol/asm/kernels/transaction/main.masm @@ -52,9 +52,13 @@ const EPILOGUE_END_EVENT=event("miden::protocol::tx::epilogue_end") #! BLOCK_COMMITMENT, #! INITIAL_ACCOUNT_COMMITMENT, #! INPUT_NOTES_COMMITMENT, -#! account_id_prefix, account_id_suffix, block_num, pad(1) +#! account_id_suffix, account_id_prefix, block_num, pad(1) +#! ] +#! Outputs: [ +#! OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, +#! native_asset_id_suffix, native_asset_id_prefix, fee_amount, tx_expiration_block_num, +#! pad(4) #! ] -#! Outputs: [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, FEE_ASSET, tx_expiration_block_num, pad(3)] #! #! Where: #! - BLOCK_COMMITMENT is the reference block for the transaction execution. @@ -65,7 +69,10 @@ const EPILOGUE_END_EVENT=event("miden::protocol::tx::epilogue_end") #! - OUTPUT_NOTES_COMMITMENT is the commitment to the notes created by the transaction. #! - ACCOUNT_UPDATE_COMMITMENT is the hash of the the final account commitment and account #! delta commitment. -#! 
- FEE_ASSET is the fungible asset used as the transaction fee. +#! - fee_amount is the computed fee amount of the transaction denominated in the native asset. +#! - native_asset_id_{prefix,suffix} are the prefix and suffix felts of the faucet that issues the +#! native asset. +#! - tx_expiration_block_num is the transaction expiration block number. @locals(1) proc main # Prologue @@ -130,7 +137,7 @@ proc main # get the memory address of the transaction script root and load it to the stack exec.memory::get_tx_script_root_ptr - padw dup.4 mem_loadw_be + padw dup.4 mem_loadw_le # => [TX_SCRIPT_ROOT, tx_script_root_ptr, pad(16)] exec.word::eqz not @@ -167,11 +174,13 @@ proc main # execute the transaction epilogue exec.epilogue::finalize_transaction - # => [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, FEE_ASSET, tx_expiration_block_num, pad(16)] + # => [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, + # native_asset_id_suffix, native_asset_id_prefix, fee_amount, tx_expiration_block_num, pad(16)] # truncate the stack to contain 16 elements in total - repeat.13 movup.13 drop end - # => [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, FEE_ASSET, tx_expiration_block_num, pad(3)] + repeat.3 movupw.3 dropw end + # => [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, + # native_asset_id_suffix, native_asset_id_prefix, fee_amount, tx_expiration_block_num, pad(4)] emit.EPILOGUE_END_EVENT end diff --git a/crates/miden-protocol/asm/kernels/transaction/tx_script_main.masm b/crates/miden-protocol/asm/kernels/transaction/tx_script_main.masm index b0b12ea8cf..b51ea9a44f 100644 --- a/crates/miden-protocol/asm/kernels/transaction/tx_script_main.masm +++ b/crates/miden-protocol/asm/kernels/transaction/tx_script_main.masm @@ -24,7 +24,7 @@ const ERR_TX_TRANSACTION_SCRIPT_IS_MISSING="the transaction script is missing" #! BLOCK_COMMITMENT, #! INITIAL_ACCOUNT_COMMITMENT, #! INPUT_NOTES_COMMITMENT, -#! account_id_prefix, account_id_suffix, block_num, +#! 
account_id_suffix, account_id_prefix, block_num, #! ] #! Outputs: [] #! @@ -46,7 +46,7 @@ proc main # get the memory address of the transaction script root and load it to the stack exec.memory::get_tx_script_root_ptr - padw dup.4 mem_loadw_be + padw dup.4 mem_loadw_le # => [TX_SCRIPT_ROOT, tx_script_root_ptr] # return an error if the transaction script was not specified diff --git a/crates/miden-protocol/asm/protocol/active_account.masm b/crates/miden-protocol/asm/protocol/active_account.masm index 2c71342ae6..724700ece3 100644 --- a/crates/miden-protocol/asm/protocol/active_account.masm +++ b/crates/miden-protocol/asm/protocol/active_account.masm @@ -1,4 +1,5 @@ -use ::miden::protocol::account_id +use miden::protocol::account_id +use miden::protocol::asset use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_ID_OFFSET use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_NONCE_OFFSET use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_INITIAL_COMMITMENT_OFFSET @@ -35,10 +36,10 @@ const ERR_VAULT_HAS_NON_FUNGIBLE_ASSET_PROC_CAN_BE_CALLED_ONLY_WITH_NON_FUNGIBLE #! Returns the ID of the active account. #! #! Inputs: [] -#! Outputs: [account_id_prefix, account_id_suffix] +#! Outputs: [account_id_suffix, account_id_prefix] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the active account. +#! - account_id_{suffix,prefix} are the suffix and prefix felts of the ID of the active account. #! #! Invocation: exec pub proc get_id @@ -54,11 +55,11 @@ pub proc get_id # => [offset, is_native = 0, pad(14)] syscall.exec_kernel_proc - # => [account_id_prefix, account_id_suffix, pad(14)] + # => [account_id_suffix, account_id_prefix, pad(14)] # clean the stack swapdw dropw dropw swapw dropw movdn.3 movdn.3 drop drop - # => [account_id_prefix, account_id_suffix] + # => [account_id_suffix, account_id_prefix] end #! Returns the nonce of the active account. @@ -300,11 +301,11 @@ end #! Gets an item from the active account storage. #! 
-#! Inputs: [slot_id_prefix, slot_id_suffix] +#! Inputs: [slot_id_suffix, slot_id_prefix] #! Outputs: [VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the item. #! @@ -314,14 +315,14 @@ end #! Invocation: exec pub proc get_item push.0 movdn.2 - # => [slot_id_prefix, slot_id_suffix, 0] + # => [slot_id_suffix, slot_id_prefix, 0] push.ACCOUNT_GET_ITEM_OFFSET - # => [offset, slot_id_prefix, slot_id_suffix, 0] + # => [offset, slot_id_suffix, slot_id_prefix, 0] # pad the stack padw swapw padw padw swapdw - # => [offset, slot_id_prefix, slot_id_suffix, pad(13)] + # => [offset, slot_id_suffix, slot_id_prefix, pad(13)] syscall.exec_kernel_proc # => [VALUE, pad(12)] @@ -334,11 +335,11 @@ end #! Gets the initial item from the active account storage slot as it was at the beginning of the #! transaction. #! -#! Inputs: [slot_id_prefix, slot_id_suffix] +#! Inputs: [slot_id_suffix, slot_id_prefix] #! Outputs: [INIT_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - INIT_VALUE is the initial value of the item at the beginning of the transaction. #! @@ -348,14 +349,14 @@ end #! 
Invocation: exec pub proc get_initial_item push.0 movdn.2 - # => [slot_id_prefix, slot_id_suffix, 0] + # => [slot_id_suffix, slot_id_prefix, 0] push.ACCOUNT_GET_INITIAL_ITEM_OFFSET - # => [offset, slot_id_prefix, slot_id_suffix, 0] + # => [offset, slot_id_suffix, slot_id_prefix, 0] # pad the stack padw swapw padw padw swapdw - # => [offset, slot_id_prefix, slot_id_suffix, pad(13)] + # => [offset, slot_id_suffix, slot_id_prefix, pad(13)] syscall.exec_kernel_proc # => [INIT_VALUE, pad(12)] @@ -367,11 +368,11 @@ end #! Gets a map item from the active account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY] #! Outputs: [VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - the slot must point to the root of the storage map. #! - KEY is the key of the item to get. @@ -384,11 +385,11 @@ end #! Invocation: exec pub proc get_map_item push.ACCOUNT_GET_MAP_ITEM_OFFSET - # => [offset, slot_id_prefix, slot_id_suffix, KEY] + # => [offset, slot_id_suffix, slot_id_prefix, KEY] # pad the stack push.0 movdn.7 padw padw swapdw - # => [0, offset, slot_id_prefix, slot_id_suffix, KEY, pad(9)] + # => [offset, slot_id_suffix, slot_id_prefix, KEY, pad(9)] syscall.exec_kernel_proc # => [VALUE, pad(12)] @@ -401,11 +402,11 @@ end #! Gets the initial VALUE from the active account storage map as it was at the beginning of the #! transaction. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY] #! Outputs: [INIT_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! 
- KEY is the key of the item to get. #! - INIT_VALUE is the initial value of the item at the beginning of the transaction. @@ -417,10 +418,10 @@ end #! Invocation: exec pub proc get_initial_map_item push.ACCOUNT_GET_INITIAL_MAP_ITEM_OFFSET - # => [offset, slot_id_prefix, slot_id_suffix, KEY] + # => [offset, slot_id_suffix, slot_id_prefix, KEY] push.0 movdn.7 padw padw swapdw - # => [0, offset, slot_id_prefix, slot_id_suffix, KEY, pad(9)] + # => [offset, slot_id_suffix, slot_id_prefix, KEY, pad(9)] syscall.exec_kernel_proc # => [INIT_VALUE, pad(12)] @@ -433,14 +434,15 @@ end # VAULT # ------------------------------------------------------------------------------------------------- -#! Returns the asset associated with the provided faucet_id in the active account's vault. +#! Returns the asset associated with the provided asset vault key in the active account's vault. #! #! Inputs: [ASSET_KEY] -#! Outputs: [ASSET] +#! Outputs: [ASSET_VALUE] #! #! Where: #! - ASSET_KEY is the asset vault key of the asset to fetch. -#! - ASSET is the asset from the vault, which can be the EMPTY_WORD if it isn't present. +#! - ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't +#! present. #! #! Invocation: exec pub proc get_asset @@ -453,22 +455,23 @@ pub proc get_asset # => [offset, ASSET_KEY, pad(11)] syscall.exec_kernel_proc - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] # clean the stack swapdw dropw dropw swapw dropw - # => [ASSET] + # => [ASSET_VALUE] end -#! Returns the asset issued by the provided faucet_id in the active account's vault at the -#! beginning of the transaction. +#! Returns the asset associated with the provided asset vault key in the active account's vault at +#! the beginning of the transaction. #! #! Inputs: [ASSET_KEY] -#! Outputs: [ASSET] +#! Outputs: [ASSET_VALUE] #! #! Where: #! - ASSET_KEY is the asset vault key of the asset to fetch. -#! - ASSET is the asset at the beginning of the transaction. +#! 
- ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't +#! present. #! #! Invocation: exec pub proc get_initial_asset @@ -481,21 +484,21 @@ pub proc get_initial_asset # => [offset, ASSET_KEY, pad(11)] syscall.exec_kernel_proc - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] # clean the stack swapdw dropw dropw swapw dropw - # => [ASSET] + # => [ASSET_VALUE] end #! Returns the balance of the fungible asset associated with the provided faucet_id in the active #! account's vault. #! -#! Inputs: [faucet_id_prefix, faucet_id_suffix] +#! Inputs: [faucet_id_suffix, faucet_id_prefix] #! Outputs: [balance] #! #! Where: -#! - faucet_id_{prefix,suffix} are the prefix and suffix felts of the faucet ID of the fungible +#! - faucet_id_{suffix,prefix} are the suffix and prefix felts of the faucet ID of the fungible #! asset of interest. #! - balance is the vault balance of the fungible asset. #! @@ -505,29 +508,33 @@ end #! Invocation: exec pub proc get_balance # assert that the faucet id is a fungible faucet - dup exec.account_id::is_fungible_faucet + dup.1 exec.account_id::is_fungible_faucet assert.err=ERR_VAULT_GET_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET - # => [faucet_id_prefix, faucet_id_suffix] + # => [faucet_id_suffix, faucet_id_prefix] - push.0 movdn.2 push.0 movdn.2 + # TODO(callbacks): This should take ASSET_KEY as input to avoid hardcoding the callbacks flag. + push.0 + # => [enable_callbacks = 0, faucet_id_suffix, faucet_id_prefix] + + exec.asset::create_fungible_key # => [ASSET_KEY] exec.get_asset - # => [ASSET] + # => [ASSET_VALUE] # extract the asset's balance - exec.::miden::protocol::util::asset::get_balance_from_fungible_asset + exec.::miden::protocol::util::asset::fungible_value_into_amount # => [balance] end #! Returns the balance of the fungible asset associated with the provided faucet_id in the active #! account's vault at the beginning of the transaction. #! -#! 
Inputs: [faucet_id_prefix, faucet_id_suffix] +#! Inputs: [faucet_id_suffix, faucet_id_prefix] #! Outputs: [init_balance] #! #! Where: -#! - faucet_id_{prefix, suffix} are the prefix and suffix felts of the faucet id of the fungible +#! - faucet_id_{suffix, prefix} are the suffix and prefix felts of the faucet id of the fungible #! asset of interest. #! - init_balance is the vault balance of the fungible asset at the beginning of the transaction. #! @@ -537,48 +544,49 @@ end #! Invocation: exec pub proc get_initial_balance # assert that the faucet id is a fungible faucet - dup exec.account_id::is_fungible_faucet + dup.1 exec.account_id::is_fungible_faucet assert.err=ERR_VAULT_GET_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET - # => [faucet_id_prefix, faucet_id_suffix] + # => [faucet_id_suffix, faucet_id_prefix] + + # TODO(callbacks): This should take ASSET_KEY as input to avoid hardcoding the callbacks flag. + push.0 + # => [enable_callbacks = 0, faucet_id_suffix, faucet_id_prefix] - push.0 movdn.2 push.0 movdn.2 + exec.asset::create_fungible_key # => [ASSET_KEY] exec.get_initial_asset - # => [ASSET] + # => [ASSET_VALUE] # extract the asset's balance - exec.::miden::protocol::util::asset::get_balance_from_fungible_asset + exec.::miden::protocol::util::asset::fungible_value_into_amount # => [balance] end -#! Returns a boolean indicating whether the non-fungible asset is present in the active account's -#! vault. +#! Returns a boolean indicating whether the active account stores an asset with the provided +#! non-fungible asset vault key in its vault. #! -#! Inputs: [ASSET] +#! Inputs: [ASSET_KEY] #! Outputs: [has_asset] #! #! Where: -#! - ASSET is the non-fungible asset of interest -#! - has_asset is a boolean indicating whether the account vault has the asset of interest +#! - ASSET_KEY is the key of the non-fungible asset to check. +#! - has_asset is a boolean indicating whether the account vault has the asset. #! #! Panics if: -#! - the ASSET is a fungible asset. +#! 
- the ASSET_VALUE is a fungible asset. #! #! Invocation: exec pub proc has_non_fungible_asset - # check if the asset is a non-fungible asset - # hack: assert second element is zero to ensure it is a non-fungible asset - # TODO(expand_assets): This procedure may go away. If not, check more reliably. - dup.2 neq.0 - assert.err=ERR_VAULT_HAS_NON_FUNGIBLE_ASSET_PROC_CAN_BE_CALLED_ONLY_WITH_NON_FUNGIBLE_ASSET - # => [ASSET] + # => [faucet_id_prefix, faucet_id_suffix, asset_id_prefix, asset_id_suffix] - exec.build_non_fungible_asset_vault_key + # assert that the faucet id is a non-fungible faucet + dup.3 exec.account_id::is_non_fungible_faucet + assert.err=ERR_VAULT_HAS_NON_FUNGIBLE_ASSET_PROC_CAN_BE_CALLED_ONLY_WITH_NON_FUNGIBLE_ASSET # => [ASSET_KEY] exec.get_asset - # => [ASSET] + # => [ASSET_VALUE] # compare with EMPTY_WORD to assess if the asset exists in the vault exec.word::eqz not @@ -672,38 +680,3 @@ pub proc has_procedure # => [is_procedure_available] end -# TODO(expand_assets): -# Exact copy of $kernel::asset_vault::build_non_fungible_asset_vault_key -# This should only be temporarily needed and if not, deduplicate. - -# The bitmask that when applied will set the fungible bit to zero. -const INVERSE_FUNGIBLE_BITMASK_U32=0xffffffdf # last byte: 0b1101_1111 - -#! -#! Builds the vault key of a non fungible asset. The asset is NOT validated and therefore must -#! be a valid non-fungible asset. -#! -#! Inputs: [ASSET] -#! Outputs: [ASSET_KEY] -#! -#! Where: -#! - ASSET is the non-fungible asset for which the vault key is built. -#! - ASSET_KEY is the vault key of the non-fungible asset. 
-proc build_non_fungible_asset_vault_key - # create the asset key from the non-fungible asset by swapping hash0 with the faucet id - # => [faucet_id_prefix, hash2, hash1, hash0] - swap.3 - # => [hash0, hash2, hash1 faucet_id_prefix] - - # disassemble hash0 into u32 limbs - u32split swap - # => [hash0_lo, hash0_hi, hash2, hash1 faucet_id_prefix] - - # set the fungible bit to 0 - u32and.INVERSE_FUNGIBLE_BITMASK_U32 - # => [hash0_lo', hash0_hi, hash2, hash1 faucet_id_prefix] - - # reassemble hash0 felt by multiplying the high part with 2^32 and adding the lo part - swap mul.0x0100000000 add - # => [ASSET_KEY] -end diff --git a/crates/miden-protocol/asm/protocol/active_note.masm b/crates/miden-protocol/asm/protocol/active_note.masm index d46edea0ad..8a34ba12fd 100644 --- a/crates/miden-protocol/asm/protocol/active_note.masm +++ b/crates/miden-protocol/asm/protocol/active_note.masm @@ -1,4 +1,4 @@ -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 use miden::core::mem use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_ASSETS_INFO_OFFSET @@ -169,10 +169,10 @@ end #! Returns the sender of the active note. #! #! Inputs: [] -#! Outputs: [sender_id_prefix, sender_id_suffix] +#! Outputs: [sender_id_suffix, sender_id_prefix] #! #! Where: -#! - sender_{prefix,suffix} are the prefix and suffix felts of the sender of the active note. +#! - sender_{suffix,prefix} are the suffix and prefix felts of the sender of the active note. #! #! Panics if: #! - no note is currently active. @@ -185,7 +185,7 @@ pub proc get_sender # extract the sender ID from the metadata header exec.note::extract_sender_from_metadata - # => [sender_id_prefix, sender_id_suffix] + # => [sender_id_suffix, sender_id_prefix] end #! Returns the serial number of the active note. @@ -301,7 +301,7 @@ proc write_storage_to_memory # prepare the stack for the `pipe_double_words_to_memory` procedure. 
# - # To match `rpo256::hash_elements` (used for NOTE_STORAGE_COMMITMENT), we set the first capacity + # To match `poseidon2::hash_elements` (used for NOTE_STORAGE_COMMITMENT), we set the first capacity # element to `num_storage_items % 8`. dup.6 dup.6 # OS => [num_storage_items, write_ptr, end_ptr, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] @@ -311,21 +311,21 @@ proc write_storage_to_memory # OS => [num_storage_items_mod_8, write_ptr, end_ptr, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [[INPUT_VALUES]] - push.0.0.0 - # OS => [A, write_ptr, end_ptr, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr], where A = [0, 0, 0, num_storage_items_mod_8] + push.0.0.0 movup.3 + # OS => [CAPACITY = [num_storage_items_mod_8, 0, 0, 0], write_ptr, end_ptr, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [[INPUT_VALUES]] padw padw - # OS => [PAD, PAD, A, write_ptr, end_ptr, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] + # OS => [RATE0, RATE1, CAPACITY, write_ptr, end_ptr, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [[INPUT_VALUES]] # write the inputs from the advice stack into memory exec.mem::pipe_double_words_to_memory - # OS => [PERM, PERM, PERM, end_ptr', NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] + # OS => [RATE0, RATE1, CAPACITY, end_ptr', NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [] # extract the computed commitment from the hasher state - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # OS => [COMPUTED_COMMITMENT, end_ptr', NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # drop end_ptr' diff --git a/crates/miden-protocol/asm/protocol/asset.masm b/crates/miden-protocol/asm/protocol/asset.masm index 5505fee44a..ce2ecfb614 100644 --- a/crates/miden-protocol/asm/protocol/asset.masm +++ b/crates/miden-protocol/asm/protocol/asset.masm @@ -1,69 +1,94 @@ use miden::protocol::account_id +use miden::protocol::util::asset # RE-EXPORTS # 
================================================================================================= pub use ::miden::protocol::util::asset::FUNGIBLE_ASSET_MAX_AMOUNT +pub use ::miden::protocol::util::asset::ASSET_SIZE +pub use ::miden::protocol::util::asset::ASSET_VALUE_MEMORY_OFFSET +pub use ::miden::protocol::util::asset::key_to_faucet_id +pub use ::miden::protocol::util::asset::key_into_faucet_id +pub use ::miden::protocol::util::asset::key_to_asset_id +pub use ::miden::protocol::util::asset::key_into_asset_id +pub use ::miden::protocol::util::asset::key_to_callbacks_enabled +pub use ::miden::protocol::util::asset::store +pub use ::miden::protocol::util::asset::load +pub use ::miden::protocol::util::asset::fungible_value_into_amount +pub use ::miden::protocol::util::asset::fungible_to_amount +pub use ::miden::protocol::util::asset::create_fungible_key # ERRORS # ================================================================================================= -const ERR_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID="failed to build the fungible asset because the provided faucet id is not from a fungible faucet" - const ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_ALLOWED_AMOUNT="fungible asset build operation called with amount that exceeds the maximum allowed asset amount" +const ERR_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID="failed to build the fungible asset because the provided faucet id is not from a fungible faucet" + const ERR_NON_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID="failed to build the non-fungible asset because the provided faucet id is not from a non-fungible faucet" # PROCEDURES # ================================================================================================= -#! Builds a fungible asset for the specified fungible faucet and amount. +#! Creates a fungible asset for the specified fungible faucet and amount. #! -#! Inputs: [faucet_id_prefix, faucet_id_suffix, amount] -#! Outputs: [ASSET] +#! 
Inputs: [enable_callbacks, faucet_id_suffix, faucet_id_prefix, amount] +#! Outputs: [ASSET_KEY, ASSET_VALUE] #! #! Where: -#! - faucet_id_{prefix,suffix} are the prefix and suffix felts of the faucet to create the asset +#! - enable_callbacks is a flag (0 or 1) indicating whether asset callbacks are enabled. +#! - faucet_id_{suffix,prefix} are the suffix and prefix felts of the faucet to create the asset #! for. #! - amount is the amount of the asset to create. -#! - ASSET is the built fungible asset. +#! - ASSET_KEY is the vault key of the created fungible asset. +#! - ASSET_VALUE is the value of the created fungible asset. +#! +#! Panics if: +#! - the provided faucet ID is not a fungible faucet. +#! - the provided amount exceeds FUNGIBLE_ASSET_MAX_AMOUNT. +#! - enable_callbacks is not 0 or 1. #! #! Invocation: exec -pub proc build_fungible_asset +pub proc create_fungible_asset # assert the faucet is a fungible faucet - dup exec.account_id::is_fungible_faucet assert.err=ERR_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID - # => [faucet_id_prefix, faucet_id_suffix, amount] + dup.2 exec.account_id::is_fungible_faucet assert.err=ERR_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID + # => [enable_callbacks, faucet_id_suffix, faucet_id_prefix, amount] # assert the amount is valid - dup.2 lte.FUNGIBLE_ASSET_MAX_AMOUNT + dup.3 lte.FUNGIBLE_ASSET_MAX_AMOUNT assert.err=ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_ALLOWED_AMOUNT - # => [faucet_id_prefix, faucet_id_suffix, amount] + # => [enable_callbacks, faucet_id_suffix, faucet_id_prefix, amount] - # create the asset - push.0 movdn.2 - # => [ASSET] + # SAFETY: faucet ID and amount were validated + exec.asset::create_fungible_asset_unchecked + # => [ASSET_KEY, ASSET_VALUE] end -#! Builds a non fungible asset for the specified non-fungible faucet and amount. +#! Creates a non fungible asset for the specified non-fungible faucet. #! -#! Inputs: [faucet_id_prefix, DATA_HASH] -#! Outputs: [ASSET] +#! 
Inputs: [enable_callbacks, faucet_id_suffix, faucet_id_prefix, DATA_HASH] +#! Outputs: [ASSET_KEY, ASSET_VALUE] #! #! Where: -#! - faucet_id_{prefix,suffix} are the prefix and suffix felts of the faucet to create the asset +#! - enable_callbacks is a flag (0 or 1) indicating whether asset callbacks are enabled. +#! - faucet_id_{suffix,prefix} are the suffix and prefix felts of the faucet to create the asset #! for. -#! - DATA_HASH is the data hash of the non-fungible asset to build. -#! - ASSET is the built non-fungible asset. +#! - DATA_HASH is the data hash of the non-fungible asset to create. +#! - ASSET_KEY is the vault key of the created non-fungible asset. +#! - ASSET_VALUE is the value of the created non-fungible asset, which is identical to DATA_HASH. +#! +#! Panics if: +#! - the provided faucet ID is not a non-fungible faucet. +#! - enable_callbacks is not 0 or 1. #! #! Invocation: exec -pub proc build_non_fungible_asset +pub proc create_non_fungible_asset # assert the faucet is a non-fungible faucet - dup exec.account_id::is_non_fungible_faucet + dup.2 exec.account_id::is_non_fungible_faucet assert.err=ERR_NON_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID - # => [faucet_id_prefix, hash3, hash2, hash1, hash0] + # => [enable_callbacks, faucet_id_suffix, faucet_id_prefix, DATA_HASH] - # build the asset - swap drop - # => [faucet_id_prefix, hash2, hash1, hash0] - # => [ASSET] + # SAFETY: faucet ID was validated + exec.::miden::protocol::util::asset::create_non_fungible_asset_unchecked + # => [ASSET_KEY, ASSET_VALUE] end diff --git a/crates/miden-protocol/asm/protocol/faucet.masm b/crates/miden-protocol/asm/protocol/faucet.masm index 664632d17b..c9f105c743 100644 --- a/crates/miden-protocol/asm/protocol/faucet.masm +++ b/crates/miden-protocol/asm/protocol/faucet.masm @@ -2,15 +2,17 @@ use miden::protocol::asset use miden::protocol::active_account use ::miden::protocol::kernel_proc_offsets::FAUCET_MINT_ASSET_OFFSET use 
::miden::protocol::kernel_proc_offsets::FAUCET_BURN_ASSET_OFFSET +use ::miden::protocol::kernel_proc_offsets::FAUCET_HAS_CALLBACKS_OFFSET #! Creates a fungible asset for the faucet the transaction is being executed against. #! #! Inputs: [amount] -#! Outputs: [ASSET] +#! Outputs: [ASSET_KEY, ASSET_VALUE] #! #! Where: #! - amount is the amount of the asset to create. -#! - ASSET is the created fungible asset. +#! - ASSET_KEY is the vault key of the created fungible asset. +#! - ASSET_VALUE is the value of the created fungible asset. #! #! Panics if: #! - the active account is not a fungible faucet. @@ -19,43 +21,56 @@ use ::miden::protocol::kernel_proc_offsets::FAUCET_BURN_ASSET_OFFSET pub proc create_fungible_asset # fetch the id of the faucet the transaction is being executed against. exec.active_account::get_id - # => [id_prefix, id_suffix, amount] + # => [id_suffix, id_prefix, amount] - # build the fungible asset - exec.asset::build_fungible_asset - # => [ASSET] + # check whether the faucet has callbacks defined + exec.has_callbacks + # => [has_callbacks, id_suffix, id_prefix, amount] + + # create the fungible asset + exec.asset::create_fungible_asset + # => [ASSET_KEY, ASSET_VALUE] end #! Creates a non-fungible asset for the faucet the transaction is being executed against. #! #! Inputs: [DATA_HASH] -#! Outputs: [ASSET] +#! Outputs: [ASSET_KEY, ASSET_VALUE] #! #! Where: #! - DATA_HASH is the data hash of the non-fungible asset to create. -#! - ASSET is the created non-fungible asset. +#! - ASSET_KEY is the vault key of the created non-fungible asset. +#! - ASSET_VALUE is the value of the created non-fungible asset. #! #! Panics if: #! - the active account is not a non-fungible faucet. #! #! 
Invocation: exec pub proc create_non_fungible_asset - # get the id of the faucet the transaction is being executed against - exec.active_account::get_id swap drop - # => [faucet_id_prefix, DATA_HASH] + # fetch the id of the faucet the transaction is being executed against + exec.active_account::get_id + # => [id_suffix, id_prefix, DATA_HASH] + + # check whether the faucet has callbacks defined + exec.has_callbacks + # => [has_callbacks, id_suffix, id_prefix, DATA_HASH] # build the non-fungible asset - exec.asset::build_non_fungible_asset - # => [ASSET] + exec.asset::create_non_fungible_asset + # => [ASSET_KEY, ASSET_VALUE] end #! Mint an asset from the faucet the transaction is being executed against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [NEW_ASSET_VALUE] #! #! Where: -#! - ASSET is the asset that was minted. +#! - ASSET_KEY is the vault key of the asset to mint. +#! - ASSET_VALUE is the value of the asset that was minted. +#! - NEW_ASSET_VALUE is: +#! - For fungible assets: the ASSET_VALUE merged with the existing vault asset value, if any. +#! - For non-fungible assets: identical to ASSET_VALUE. #! #! Panics if: #! - the transaction is not being executed against a faucet. @@ -68,28 +83,29 @@ end #! #! Invocation: exec pub proc mint - push.FAUCET_MINT_ASSET_OFFSET - # => [offset, ASSET] - # pad the stack - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [offset, ASSET, pad(11)] + padw padw swapdw movup.8 drop + # => [ASSET_KEY, ASSET_VALUE, pad(7)] + + push.FAUCET_MINT_ASSET_OFFSET + # => [offset, ASSET_KEY, ASSET_VALUE, pad(7)] syscall.exec_kernel_proc - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] # clean the stack swapdw dropw dropw swapw dropw - # => [ASSET] + # => [ASSET_VALUE] end #! Burn an asset from the faucet the transaction is being executed against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [] #! #! Where: -#! 
- ASSET is the asset that was burned. +#! - ASSET_KEY is the vault key of the asset to burn. +#! - ASSET_VALUE is the value of the asset to burn. #! #! Panics if: #! - the transaction is not being executed against a faucet. @@ -103,17 +119,45 @@ end #! #! Invocation: exec pub proc burn + # pad the stack + padw padw swapdw movup.8 drop + # => [ASSET_KEY, ASSET_VALUE, pad(7)] + push.FAUCET_BURN_ASSET_OFFSET - # => [offset, ASSET] + # => [offset, ASSET_KEY, ASSET_VALUE, pad(7)] + + syscall.exec_kernel_proc + # => [pad(16)] + # clean the stack + dropw dropw dropw dropw + # => [] +end + +#! Returns whether the active account defines callbacks. +#! +#! The account defines callbacks if any callback storage slot is present and it contains not the +#! empty word. +#! +#! Inputs: [] +#! Outputs: [has_callbacks] +#! +#! Where: +#! - has_callbacks is 1 if the account defines callbacks, 0 otherwise. +#! +#! Invocation: exec +pub proc has_callbacks # pad the stack - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [offset, ASSET, pad(11)] + padw padw padw push.0.0.0 + # => [pad(15)] + + push.FAUCET_HAS_CALLBACKS_OFFSET + # => [offset, pad(15)] syscall.exec_kernel_proc - # => [ASSET, pad(12)] + # => [has_callbacks, pad(15)] # clean the stack - swapdw dropw dropw swapw dropw - # => [ASSET] + swapdw dropw dropw swapw dropw movdn.3 drop drop drop + # => [has_callbacks] end diff --git a/crates/miden-protocol/asm/protocol/input_note.masm b/crates/miden-protocol/asm/protocol/input_note.masm index 2b240c4910..5d09f5bc5f 100644 --- a/crates/miden-protocol/asm/protocol/input_note.masm +++ b/crates/miden-protocol/asm/protocol/input_note.masm @@ -166,11 +166,11 @@ end #! Returns the sender of the input note with the specified index. #! #! Inputs: [note_index] -#! Outputs: [sender_id_prefix, sender_id_suffix] +#! Outputs: [sender_id_suffix, sender_id_prefix] #! #! Where: #! - note_index is the index of the input note whose sender should be returned. -#! 
- sender_{prefix,suffix} are the prefix and suffix felts of the specified note. +#! - sender_{suffix,prefix} are the suffix and prefix felts of the specified note. #! #! Panics if: #! - the note index is greater or equal to the total number of input notes. @@ -183,7 +183,7 @@ pub proc get_sender # extract the sender ID from the metadata header exec.note::extract_sender_from_metadata - # => [sender_id_prefix, sender_id_suffix] + # => [sender_id_suffix, sender_id_prefix] end #! Returns the inputs commitment and length of the input note with the specified index. diff --git a/crates/miden-protocol/asm/protocol/kernel_proc_offsets.masm b/crates/miden-protocol/asm/protocol/kernel_proc_offsets.masm index 7e65b6b089..eeb370179c 100644 --- a/crates/miden-protocol/asm/protocol/kernel_proc_offsets.masm +++ b/crates/miden-protocol/asm/protocol/kernel_proc_offsets.masm @@ -47,44 +47,45 @@ pub const ACCOUNT_HAS_PROCEDURE_OFFSET=24 ### Faucet ###################################### pub const FAUCET_MINT_ASSET_OFFSET=25 pub const FAUCET_BURN_ASSET_OFFSET=26 +pub const FAUCET_HAS_CALLBACKS_OFFSET=27 ### Note ######################################## # input notes -pub const INPUT_NOTE_GET_METADATA_OFFSET=27 -pub const INPUT_NOTE_GET_ASSETS_INFO_OFFSET=28 -pub const INPUT_NOTE_GET_SCRIPT_ROOT_OFFSET=29 -pub const INPUT_NOTE_GET_STORAGE_INFO_OFFSET=30 -pub const INPUT_NOTE_GET_SERIAL_NUMBER_OFFSET=31 -pub const INPUT_NOTE_GET_RECIPIENT_OFFSET=32 +pub const INPUT_NOTE_GET_METADATA_OFFSET=28 +pub const INPUT_NOTE_GET_ASSETS_INFO_OFFSET=29 +pub const INPUT_NOTE_GET_SCRIPT_ROOT_OFFSET=30 +pub const INPUT_NOTE_GET_STORAGE_INFO_OFFSET=31 +pub const INPUT_NOTE_GET_SERIAL_NUMBER_OFFSET=32 +pub const INPUT_NOTE_GET_RECIPIENT_OFFSET=33 # output notes -pub const OUTPUT_NOTE_CREATE_OFFSET=33 -pub const OUTPUT_NOTE_GET_METADATA_OFFSET=34 -pub const OUTPUT_NOTE_GET_ASSETS_INFO_OFFSET=35 -pub const OUTPUT_NOTE_GET_RECIPIENT_OFFSET=36 -pub const OUTPUT_NOTE_ADD_ASSET_OFFSET=37 -pub const 
OUTPUT_NOTE_SET_ATTACHMENT_OFFSET=38 +pub const OUTPUT_NOTE_CREATE_OFFSET=34 +pub const OUTPUT_NOTE_GET_METADATA_OFFSET=35 +pub const OUTPUT_NOTE_GET_ASSETS_INFO_OFFSET=36 +pub const OUTPUT_NOTE_GET_RECIPIENT_OFFSET=37 +pub const OUTPUT_NOTE_ADD_ASSET_OFFSET=38 +pub const OUTPUT_NOTE_SET_ATTACHMENT_OFFSET=39 ### Tx ########################################## # input notes -pub const TX_GET_NUM_INPUT_NOTES_OFFSET=39 -pub const TX_GET_INPUT_NOTES_COMMITMENT_OFFSET=40 +pub const TX_GET_NUM_INPUT_NOTES_OFFSET=40 +pub const TX_GET_INPUT_NOTES_COMMITMENT_OFFSET=41 # output notes -pub const TX_GET_NUM_OUTPUT_NOTES_OFFSET=41 -pub const TX_GET_OUTPUT_NOTES_COMMITMENT_OFFSET=42 +pub const TX_GET_NUM_OUTPUT_NOTES_OFFSET=42 +pub const TX_GET_OUTPUT_NOTES_COMMITMENT_OFFSET=43 # block info -pub const TX_GET_BLOCK_COMMITMENT_OFFSET=43 -pub const TX_GET_BLOCK_NUMBER_OFFSET=44 -pub const TX_GET_BLOCK_TIMESTAMP_OFFSET=45 +pub const TX_GET_BLOCK_COMMITMENT_OFFSET=44 +pub const TX_GET_BLOCK_NUMBER_OFFSET=45 +pub const TX_GET_BLOCK_TIMESTAMP_OFFSET=46 # foreign context -pub const TX_PREPARE_FPI_OFFSET = 46 -pub const TX_EXEC_FOREIGN_PROC_OFFSET = 47 +pub const TX_PREPARE_FPI_OFFSET = 47 +pub const TX_EXEC_FOREIGN_PROC_OFFSET = 48 # expiration data -pub const TX_GET_EXPIRATION_DELTA_OFFSET=48 # accessor -pub const TX_UPDATE_EXPIRATION_BLOCK_DELTA_OFFSET=49 # mutator +pub const TX_GET_EXPIRATION_DELTA_OFFSET=49 # accessor +pub const TX_UPDATE_EXPIRATION_BLOCK_DELTA_OFFSET=50 # mutator diff --git a/crates/miden-protocol/asm/protocol/native_account.masm b/crates/miden-protocol/asm/protocol/native_account.masm index c2ef28bbac..c3f390dd73 100644 --- a/crates/miden-protocol/asm/protocol/native_account.masm +++ b/crates/miden-protocol/asm/protocol/native_account.masm @@ -16,10 +16,10 @@ use ::miden::protocol::kernel_proc_offsets::ACCOUNT_WAS_PROCEDURE_CALLED_OFFSET #! Returns the ID of the native account of the transaction. #! #! Inputs: [] -#! 
Outputs: [account_id_prefix, account_id_suffix] +#! Outputs: [account_id_suffix, account_id_prefix] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of the native account ID of the +#! - account_id_{suffix,prefix} are the suffix and prefix felts of the native account ID of the #! transaction. #! #! Invocation: exec @@ -33,14 +33,14 @@ pub proc get_id # => [is_native = 1, pad(14)] push.ACCOUNT_GET_ID_OFFSET - # => [offset, is_native = 0, pad(14)] + # => [offset, is_native = 1, pad(14)] syscall.exec_kernel_proc - # => [account_id_prefix, account_id_suffix, pad(14)] + # => [account_id_suffix, account_id_prefix, pad(14)] # clean the stack swapdw dropw dropw swapw dropw movdn.3 movdn.3 drop drop - # => [account_id_prefix, account_id_suffix] + # => [account_id_suffix, account_id_prefix] end #! Increments the nonce of the native account by one and returns the new nonce. @@ -121,11 +121,11 @@ end #! Sets an item in the native account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, VALUE] +#! Inputs: [slot_id_suffix, slot_id_prefix, VALUE] #! Outputs: [OLD_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value to set. #! - OLD_VALUE is the previous value of the item. @@ -137,11 +137,11 @@ end #! Invocation: exec pub proc set_item push.ACCOUNT_SET_ITEM_OFFSET - # => [offset, slot_id_prefix, slot_id_suffix, VALUE] + # => [offset, slot_id_suffix, slot_id_prefix, VALUE] # pad the stack push.0 movdn.7 padw padw swapdw - # => [offset, slot_id_prefix, slot_id_suffix, VALUE, pad(9)] + # => [offset, slot_id_suffix, slot_id_prefix, VALUE, pad(9)] syscall.exec_kernel_proc # => [OLD_VALUE, pad(12)] @@ -153,11 +153,11 @@ end #! Sets a map item in the native account storage. #! -#! 
Inputs: [slot_id_prefix, slot_id_suffix, KEY, VALUE] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY, VALUE] #! Outputs: [OLD_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - the slot must point to the root of the storage map. #! - KEY is the key to set at VALUE. @@ -173,17 +173,17 @@ end #! Invocation: exec pub proc set_map_item push.ACCOUNT_SET_MAP_ITEM_OFFSET - # => [offset, slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [offset, slot_id_suffix, slot_id_prefix, KEY, VALUE] # pad the stack push.0 padw - # => [pad(4), 0, offset, slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [pad(4), 0, offset, slot_id_suffix, slot_id_prefix, KEY, VALUE] movdnw.3 - # => [0, offset, slot_id_prefix, slot_id_suffix, KEY, VALUE, pad(4)] + # => [0, offset, slot_id_suffix, slot_id_prefix, KEY, VALUE, pad(4)] movdn.11 - # => [offset, slot_id_prefix, slot_id_suffix, KEY, VALUE, pad(5)] + # => [offset, slot_id_suffix, slot_id_prefix, KEY, VALUE, pad(5)] syscall.exec_kernel_proc # => [OLD_VALUE, pad(12)] @@ -198,14 +198,16 @@ end #! Add the specified asset to the vault. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET'] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_VALUE'] #! #! Where: -#! - ASSET' is a final asset in the account vault defined as follows: -#! - If ASSET is a non-fungible asset, then ASSET' is the same as ASSET. -#! - If ASSET is a fungible asset, then ASSET' is the total fungible asset in the account vault -#! after ASSET was added to it. +#! - ASSET_KEY is the vault key of the asset that is added to the vault. +#! - ASSET_VALUE is the value of the asset to add to the vault. +#! - ASSET_VALUE' is the final asset in the account vault defined as follows: +#! - If ASSET_VALUE is a non-fungible asset, then ASSET_VALUE' is the same as ASSET_VALUE. +#! 
- If ASSET_VALUE is a fungible asset, then ASSET_VALUE' is the total fungible asset in the account vault +#! after ASSET_VALUE was added to it. #! #! Panics if: #! - the asset is not valid. @@ -214,28 +216,31 @@ end #! #! Invocation: exec pub proc add_asset - push.ACCOUNT_ADD_ASSET_OFFSET - # => [offset, ASSET] - # pad the stack - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [offset, ASSET, pad(11)] + padw padw swapdw movup.8 drop + # => [ASSET_KEY, ASSET_VALUE, pad(7)] + + push.ACCOUNT_ADD_ASSET_OFFSET + # => [offset, ASSET_KEY, ASSET_VALUE, pad(7)] syscall.exec_kernel_proc - # => [ASSET', pad(12)] + # => [ASSET_VALUE', pad(12)] # clean the stack swapdw dropw dropw swapw dropw - # => [ASSET'] + # => [ASSET_VALUE'] end -#! Remove the specified asset from the vault. +#! Remove the specified asset from the vault and return the remaining asset value. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [REMAINING_ASSET_VALUE] #! #! Where: -#! - ASSET is the asset to remove from the vault. +#! - ASSET_KEY is the vault key of the asset to remove from the vault. +#! - ASSET_VALUE is the value of the asset to remove from the vault. +#! - REMAINING_ASSET_VALUE is the value of the asset remaining in the vault after removal which may +#! be the empty word if nothing remains (e.g. if a non-fungible asset is removed). #! #! Panics if: #! - the fungible asset is not found in the vault. @@ -244,19 +249,19 @@ end #! #! 
Invocation: exec pub proc remove_asset - push.ACCOUNT_REMOVE_ASSET_OFFSET - # => [offset, ASSET] - # pad the stack - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [offset, ASSET, pad(11)] + padw padw swapdw movup.8 drop + # => [ASSET_KEY, ASSET_VALUE, pad(7)] + + push.ACCOUNT_REMOVE_ASSET_OFFSET + # => [offset, ASSET_KEY, ASSET_VALUE, pad(7)] syscall.exec_kernel_proc - # => [ASSET, pad(12)] + # => [REMAINING_ASSET_VALUE, pad(12)] # clean the stack swapdw dropw dropw swapw dropw - # => [ASSET] + # => [REMAINING_ASSET_VALUE] end # CODE diff --git a/crates/miden-protocol/asm/protocol/note.masm b/crates/miden-protocol/asm/protocol/note.masm index 2f8cab9b46..482a264547 100644 --- a/crates/miden-protocol/asm/protocol/note.masm +++ b/crates/miden-protocol/asm/protocol/note.masm @@ -1,5 +1,5 @@ use miden::protocol::account_id -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 use miden::core::mem # Re-export the max inputs per note constant. @@ -39,7 +39,7 @@ pub proc compute_storage_commitment # => [storage_ptr, num_storage_items] # compute the storage commitment (over the unpadded values) - exec.rpo256::hash_elements + exec.poseidon2::hash_elements # => [STORAGE_COMMITMENT] end @@ -59,18 +59,18 @@ pub proc write_assets_to_memory # OS => [ASSETS_COMMITMENT, num_assets, dest_ptr] # AS => [[ASSETS_DATA]] - # calculate number of assets rounded up to an even number - dup.4 dup is_odd add - # OS => [even_num_assets, ASSETS_COMMITMENT, num_assets, dest_ptr] + dup.5 dup.5 + # OS => [num_assets, dest_ptr, ASSETS_COMMITMENT, num_assets, dest_ptr] # AS => [[ASSETS_DATA]] - # prepare the stack for the `pipe_preimage_to_memory` procedure - dup.6 swap - # OS => [even_num_assets, dest_ptr, ASSETS_COMMITMENT, num_assets, dest_ptr] + # each asset takes up two words, so num_words = 2 * num_assets + # this also guarantees we pass an even number to pipe_double_words_preimage_to_memory + mul.2 + # OS => [num_words, dest_ptr, ASSETS_COMMITMENT, 
num_assets, dest_ptr] # AS => [[ASSETS_DATA]] # write the data from the advice stack into memory - exec.mem::pipe_preimage_to_memory drop + exec.mem::pipe_double_words_preimage_to_memory drop # OS => [num_assets, dest_ptr] # AS => [] end @@ -135,17 +135,20 @@ pub proc build_recipient movdnw.2 # => [SERIAL_NUM, SCRIPT_ROOT, STORAGE_COMMITMENT] - padw adv.insert_hdword exec.rpo256::merge - # => [SERIAL_HASH, SCRIPT_ROOT, STORAGE_COMMITMENT] + padw swapw + # => [SERIAL_NUM, EMPTY_WORD, SCRIPT_ROOT, STORAGE_COMMITMENT] - swapw adv.insert_hdword exec.rpo256::merge - # => [SERIAL_SCRIPT_HASH, STORAGE_COMMITMENT] + adv.insert_hdword exec.poseidon2::merge + # => [SERIAL_COMMITMENT, SCRIPT_ROOT, STORAGE_COMMITMENT] - swapw adv.insert_hdword exec.rpo256::merge + adv.insert_hdword exec.poseidon2::merge + # => [SERIAL_SCRIPT_COMMITMENT, STORAGE_COMMITMENT] + + adv.insert_hdword exec.poseidon2::merge # => [RECIPIENT] end -#! Returns the RECIPIENT for a specified SERIAL_NUM, SCRIPT_ROOT, and storage commitment. +#! Returns the RECIPIENT for a specified SERIAL_NUM, SCRIPT_ROOT and STORAGE_COMMITMENT. #! #! Inputs: [SERIAL_NUM, SCRIPT_ROOT, STORAGE_COMMITMENT] #! Outputs: [RECIPIENT] @@ -158,39 +161,38 @@ end #! #! Invocation: exec pub proc build_recipient_hash - padw exec.rpo256::merge + padw swapw + # => [SERIAL_NUM, EMPTY_WORD, SCRIPT_ROOT, STORAGE_COMMITMENT] + + exec.poseidon2::merge # => [SERIAL_NUM_HASH, SCRIPT_ROOT, STORAGE_COMMITMENT] - swapw exec.rpo256::merge + exec.poseidon2::merge # => [MERGE_SCRIPT, STORAGE_COMMITMENT] - swapw exec.rpo256::merge - # [RECIPIENT] + exec.poseidon2::merge + # => [RECIPIENT] end #! Extracts the sender ID from the provided metadata header. #! #! Inputs: [METADATA_HEADER] -#! Outputs: [sender_id_prefix, sender_id_suffix] +#! Outputs: [sender_id_suffix, sender_id_prefix] #! #! Where: #! - METADATA_HEADER is the metadata of a note. -#! - sender_{prefix,suffix} are the prefix and suffix felts of the sender ID of the note which +#! 
- sender_{suffix,prefix} are the suffix and prefix felts of the sender ID of the note which #! metadata was provided. pub proc extract_sender_from_metadata - # => [attachment_kind_scheme, tag, sender_id_prefix, sender_id_suffix_and_note_type] + # => [sender_id_suffix_and_note_type, sender_id_prefix, tag, attachment_kind_scheme] - # drop attachment kind, attachment scheme and tag - drop drop swap + # drop tag and attachment_kind_scheme + movup.3 drop movup.2 drop # => [sender_id_suffix_and_note_type, sender_id_prefix] # extract suffix of sender from merged layout, which means clearing the least significant byte exec.account_id::shape_suffix # => [sender_id_suffix, sender_id_prefix] - - # rearrange suffix and prefix - swap - # => [sender_id_prefix, sender_id_suffix] end #! Extracts the attachment kind and scheme from the provided metadata header. @@ -205,13 +207,13 @@ end #! #! Invocation: exec pub proc extract_attachment_info_from_metadata - # => [attachment_kind_scheme, METADATA_HEADER[1..4]] - movdn.3 drop drop drop + # => [sender_id_suffix_and_note_type, sender_id_prefix, tag, attachment_kind_scheme] + drop drop drop # => [attachment_kind_scheme] # deconstruct the attachment_kind_scheme to extract the attachment_scheme # attachment_kind_scheme = [30 zero bits | attachment_kind (2 bits) | attachment_scheme (32 bits)] - # u32split splits into [high, low] where low is attachment_scheme - u32split + # u32split splits into [lo, hi] where lo is attachment_scheme + u32split swap # => [attachment_kind, attachment_scheme] end diff --git a/crates/miden-protocol/asm/protocol/output_note.masm b/crates/miden-protocol/asm/protocol/output_note.masm index d4bcd50414..afc344f195 100644 --- a/crates/miden-protocol/asm/protocol/output_note.masm +++ b/crates/miden-protocol/asm/protocol/output_note.masm @@ -126,24 +126,24 @@ pub proc get_assets # => [num_assets, dest_ptr, note_index] end -#! Adds the ASSET to the note specified by the index. +#! 
Adds the asset to the note specified by the index. #! -#! Inputs: [ASSET, note_idx] +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx] #! Outputs: [] #! #! Where: #! - note_idx is the index of the note to which the asset is added. -#! - ASSET can be a fungible or non-fungible asset. +#! - ASSET_KEY is the vault key of the asset to add. +#! - ASSET_VALUE is the value of the asset to add. #! #! Invocation: exec pub proc add_asset - movup.4 push.OUTPUT_NOTE_ADD_ASSET_OFFSET - # => [offset, note_idx, ASSET] + push.OUTPUT_NOTE_ADD_ASSET_OFFSET + # => [offset, ASSET_KEY, ASSET_VALUE, note_idx] - # pad the stack before the syscall to prevent accidental modification of the deeper stack - # elements - push.0.0 movdn.7 movdn.7 padw padw swapdw - # => [offset, note_idx, ASSET, pad(10)] + # pad the stack + repeat.6 push.0 movdn.10 end + # => [offset, ASSET_KEY, ASSET_VALUE, note_idx, pad(6)] syscall.exec_kernel_proc # => [pad(16)] diff --git a/crates/miden-protocol/asm/protocol/tx.masm b/crates/miden-protocol/asm/protocol/tx.masm index a09954f95a..2e5e6fdf88 100644 --- a/crates/miden-protocol/asm/protocol/tx.masm +++ b/crates/miden-protocol/asm/protocol/tx.masm @@ -211,11 +211,11 @@ end #! Executes the provided procedure against the foreign account. #! -#! Inputs: [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, foreign_procedure_inputs(16)] +#! Inputs: [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, foreign_procedure_inputs(16)] #! Outputs: [foreign_procedure_outputs(16)] #! #! Where: -#! - foreign_account_id_{prefix,suffix} are the prefix and suffix felts of the account ID of the +#! - foreign_account_id_{suffix,prefix} are the suffix and prefix felts of the account ID of the #! foreign account to execute the procedure on. #! - foreign_procedure_inputs are the inputs to the foreign procedure padded to 16 felts. #! - foreign_procedure_outputs are the outputs of the foreign procedure padded to 16 felts. 
@@ -229,25 +229,25 @@ pub proc execute_foreign_procedure # store the foreign account ID and foreign procedure root to the local memory # this will allow us to get the 16th element of the foreign procedure inputs to pass it to the # `tx_prepare_fpi` kernel procedure - loc_store.4 loc_store.5 loc_storew_be.0 dropw + loc_store.4 loc_store.5 loc_storew_le.0 dropw # OS => [foreign_procedure_inputs(16)] - # LM => [FOREIGN_PROC_ROOT, foreign_account_id_prefix, foreign_account_id_suffix] + # LM => [FOREIGN_PROC_ROOT, foreign_account_id_suffix, foreign_account_id_prefix] # move up the last element of the foreign procedure inputs movup.15 # => [foreign_proc_input_value_15, foreign_procedure_inputs(15)] # load the foreign account ID and foreign procedure root back to the operand stack - padw loc_loadw_be.0 loc_load.5 loc_load.4 - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, foreign_proc_input_value_15, foreign_procedure_inputs(15)] + padw loc_loadw_le.0 loc_load.5 loc_load.4 + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, foreign_proc_input_value_15, foreign_procedure_inputs(15)] # get the tx_prepare_fpi procedure offset push.TX_PREPARE_FPI_OFFSET - # => [offset, foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, foreign_proc_input_value_15, foreign_procedure_inputs(15)] - + # => [offset, foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, foreign_proc_input_value_15, foreign_procedure_inputs(15)] + # pad the stack before the syscall padw padw swapdw - # => [offset, foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, + # => [offset, foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, # foreign_proc_input_value_15, pad(8), foreign_procedure_inputs(15)] # store the foreign account ID, foreign procedure root, and the 16th (last) element of the diff --git a/crates/miden-protocol/asm/shared_modules/account_id.masm 
b/crates/miden-protocol/asm/shared_modules/account_id.masm index 4dcf7d4e11..9f1637a9cc 100644 --- a/crates/miden-protocol/asm/shared_modules/account_id.masm +++ b/crates/miden-protocol/asm/shared_modules/account_id.masm @@ -51,11 +51,11 @@ const ACCOUNT_ID_STORAGE_MODE_INVALID_U32=0xc0 # 0b1100_0000 #! Returns a boolean indicating whether the account is a fungible faucet. #! -#! Inputs: [acct_id_prefix] +#! Inputs: [account_id_prefix] #! Outputs: [is_fungible_faucet] #! #! Where: -#! - acct_id_prefix is the prefix of the account ID. +#! - account_id_prefix is the prefix of the account ID. #! - is_fungible_faucet is a boolean indicating whether the account is a fungible faucet. pub proc is_fungible_faucet exec.id_type eq.FUNGIBLE_FAUCET_ACCOUNT @@ -64,11 +64,11 @@ end #! Returns a boolean indicating whether the account is a non-fungible faucet. #! -#! Inputs: [acct_id_prefix] +#! Inputs: [account_id_prefix] #! Outputs: [is_non_fungible_faucet] #! #! Where: -#! - acct_id_prefix is the prefix of the account ID. +#! - account_id_prefix is the prefix of the account ID. #! - is_non_fungible_faucet is a boolean indicating whether the account is a non-fungible faucet. pub proc is_non_fungible_faucet exec.id_type eq.NON_FUNGIBLE_FAUCET_ACCOUNT @@ -77,43 +77,43 @@ end #! Returns a boolean indicating whether the given account_ids are equal. #! -#! Inputs: [acct_id_prefix, acct_id_suffix, other_acct_id_prefix, other_acct_id_suffix] +#! Inputs: [account_id_suffix, account_id_prefix, other_account_id_suffix, other_account_id_prefix] #! Outputs: [is_id_equal] #! #! Where: -#! - acct_id_{prefix,suffix} are the prefix and suffix felts of an account ID. -#! - other_acct_id_{prefix,suffix} are the prefix and suffix felts of the other account ID to +#! - account_id_{suffix,prefix} are the suffix and prefix felts of an account ID. +#! - other_account_id_{suffix,prefix} are the suffix and prefix felts of the other account ID to #! compare against. #! 
- is_id_equal is a boolean indicating whether the account IDs are equal. pub proc is_equal movup.2 eq - # => [is_prefix_equal, acct_id_suffix, other_acct_id_suffix] + # => [is_suffix_equal, account_id_prefix, other_account_id_prefix] movdn.2 eq - # => [is_suffix_equal, is_prefix_equal] + # => [is_prefix_equal, is_suffix_equal] and # => [is_id_equal] end #! Returns a boolean indicating whether the account is a faucet. #! -#! Inputs: [acct_id_prefix] +#! Inputs: [account_id_prefix] #! Outputs: [is_faucet] #! #! Where: -#! - acct_id_prefix is the prefix of the account ID. +#! - account_id_prefix is the prefix of the account ID. #! - is_faucet is a boolean indicating whether the account is a faucet. pub proc is_faucet - u32split drop u32and.FAUCET_ACCOUNT neq.0 + u32split swap drop u32and.FAUCET_ACCOUNT neq.0 # => [is_faucet] end #! Returns a boolean indicating whether the account is a regular updatable account. #! -#! Inputs: [acct_id_prefix] +#! Inputs: [account_id_prefix] #! Outputs: [is_updatable_account] #! #! Where: -#! - acct_id_prefix is the prefix of the account ID. +#! - account_id_prefix is the prefix of the account ID. #! - is_updatable_account is a boolean indicating whether the account is a regular updatable #! account. pub proc is_updatable_account @@ -123,11 +123,11 @@ end #! Returns a boolean indicating whether the account is a regular immutable account. #! -#! Inputs: [acct_id_prefix] +#! Inputs: [account_id_prefix] #! Outputs: [is_immutable_account] #! #! Where: -#! - acct_id_prefix is the prefix of the account ID. +#! - account_id_prefix is the prefix of the account ID. #! - is_immutable_account is a boolean indicating whether the account is a regular immutable #! account. pub proc is_immutable_account @@ -138,11 +138,11 @@ end #! Validates an account ID. Note that this does not validate anything about the account type, #! since any 2-bit pattern is a valid account type. #! -#! Inputs: [account_id_prefix, account_id_suffix] +#! 
Inputs: [account_id_suffix, account_id_prefix] #! Outputs: [] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of the account ID. +#! - account_id_{suffix,prefix} are the suffix and prefix felts of the account ID. #! #! Panics if: #! - account_id_prefix does not contain version zero. @@ -150,13 +150,31 @@ end #! - account_id_suffix does not have its most significant bit set to zero. #! - account_id_suffix does not have its lower 8 bits set to zero. pub proc validate + # Validate lower 8 bits of suffix are zero. + # --------------------------------------------------------------------------------------------- + + u32split + # => [account_id_suffix_lo, account_id_suffix_hi, account_id_prefix] + u32and.0xff eq.0 + # => [is_least_significant_byte_zero, account_id_suffix_hi, account_id_prefix] + assert.err=ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO + # => [account_id_suffix_hi, account_id_prefix] + + # Validate most significant bit in suffix is zero. + # --------------------------------------------------------------------------------------------- + + u32shr.31 eq.0 + # => [is_most_significant_bit_zero, account_id_prefix] + assert.err=ERR_ACCOUNT_ID_SUFFIX_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO + # => [account_id_prefix] + # Validate version in prefix. For now only version 0 is supported. # --------------------------------------------------------------------------------------------- dup exec.id_version - # => [id_version, account_id_prefix, account_id_suffix] + # => [id_version, account_id_prefix] assertz.err=ERR_ACCOUNT_ID_UNKNOWN_VERSION - # => [account_id_prefix, account_id_suffix] + # => [account_id_prefix] # Validate storage mode in prefix. 
# --------------------------------------------------------------------------------------------- @@ -164,30 +182,13 @@ pub proc validate # there are 3 valid and 1 invalid storage mode # instead of checking the presence of any of the valid modes, we check the absence of the # invalid mode - u32split drop - # => [account_id_prefix_lo, account_id_suffix] + u32split swap drop + # => [account_id_prefix_lo] u32and.ACCOUNT_ID_STORAGE_MODE_MASK_U32 - # => [id_storage_mode_masked, account_id_suffix] + # => [id_storage_mode_masked] eq.ACCOUNT_ID_STORAGE_MODE_INVALID_U32 - # => [is_storage_mode_invalid, account_id_suffix] + # => [is_storage_mode_invalid] assertz.err=ERR_ACCOUNT_ID_UNKNOWN_STORAGE_MODE - # => [account_id_suffix] - - # Validate most significant bit in suffix is zero. - # --------------------------------------------------------------------------------------------- - - u32split - # => [account_id_suffix_hi, account_id_suffix_lo] - u32shr.31 eq.0 - # => [is_most_significant_bit_zero, account_id_suffix_lo] - assert.err=ERR_ACCOUNT_ID_SUFFIX_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO - - # Validate lower 8 bits of suffix are zero. - # --------------------------------------------------------------------------------------------- - - u32and.0xff eq.0 - # => [is_least_significant_byte_zero] - assert.err=ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO # => [] end @@ -202,7 +203,7 @@ end #! of an account ID. #! - account_id_suffix is the suffix of an account ID. pub proc shape_suffix - u32split swap + u32split # => [seed_digest_suffix_lo, seed_digest_suffix_hi] # clear lower 8 bits of the lo part @@ -227,7 +228,7 @@ end #! - id_version is the version number of the ID. proc id_version # extract the lower 32 bits - u32split drop + u32split swap drop # => [account_id_prefix_lo] # mask out the version @@ -243,13 +244,13 @@ end #! - FUNGIBLE_FAUCET_ACCOUNT #! - NON_FUNGIBLE_FAUCET_ACCOUNT #! -#! Inputs: [acct_id_prefix] -#! Outputs: [acct_type] +#! 
Inputs: [account_id_prefix] +#! Outputs: [account_type] #! #! Where: -#! - acct_id_prefix is the prefix of the account ID. -#! - acct_type is the account type. +#! - account_id_prefix is the prefix of the account ID. +#! - account_type is the account type. proc id_type - u32split drop u32and.ACCOUNT_ID_TYPE_MASK_U32 - # => [acct_type] + u32split swap drop u32and.ACCOUNT_ID_TYPE_MASK_U32 + # => [account_type] end diff --git a/crates/miden-protocol/asm/shared_modules/types.masm b/crates/miden-protocol/asm/shared_modules/types.masm new file mode 100644 index 0000000000..5ba8b5e69f --- /dev/null +++ b/crates/miden-protocol/asm/shared_modules/types.masm @@ -0,0 +1,6 @@ +# TYPE ALIASES +# ================================================================================================= + +pub type AccountId = struct { prefix: felt, suffix: felt } +pub type DoubleWord = struct { word_lo: word, word_hi: word } +pub type MemoryAddress = u32 diff --git a/crates/miden-protocol/asm/shared_utils/util/asset.masm b/crates/miden-protocol/asm/shared_utils/util/asset.masm index 13df819405..5e0a254c31 100644 --- a/crates/miden-protocol/asm/shared_utils/util/asset.masm +++ b/crates/miden-protocol/asm/shared_utils/util/asset.masm @@ -1,3 +1,8 @@ +# ERRORS +# ================================================================================================= + +const ERR_VAULT_INVALID_ENABLE_CALLBACKS = "enable_callbacks must be 0 or 1" + # CONSTANTS # ================================================================================================= @@ -6,20 +11,350 @@ # This is 2^63 - 2^31. See account_delta.masm for more details. pub const FUNGIBLE_ASSET_MAX_AMOUNT=0x7fffffff80000000 +# The number of elements in an asset, i.e. vault key and value. +pub const ASSET_SIZE = 8 + +# The offset of the asset value in an asset stored in memory. +pub const ASSET_VALUE_MEMORY_OFFSET = 4 + +# The flag representing disabled callbacks. 
+pub const CALLBACKS_DISABLED = 0 + +# The flag representing enabled callbacks. +pub const CALLBACKS_ENABLED = 1 + # PROCEDURES # ================================================================================================= -#! Returns the balance of the given fungible asset. +#! Stores an asset key and value into memory at the given pointer. +#! +#! The memory range pointer..pointer+8 will be overwritten. +#! +#! Inputs: [ptr, ASSET_KEY, ASSET_VALUE] +#! Outputs: [] +#! +#! Where: +#! - ptr is the memory address where the asset will be stored. +#! - ASSET_KEY is the 4-element word representing the asset key. +#! - ASSET_VALUE is the 4-element word representing the asset value. +pub proc store + # store asset key + movdn.4 dup.4 + # => [ptr, ASSET_KEY, ptr, ASSET_VALUE] + + mem_storew_le dropw + # => [ptr, ASSET_VALUE] + + # store asset value + add.ASSET_VALUE_MEMORY_OFFSET mem_storew_le dropw + # => [] +end + +#! Loads an asset key and value from memory given a pointer to the asset. +#! +#! Inputs: [ptr] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - ptr is the memory address of the asset. +#! - ASSET_KEY is the 4-element word representing the asset key. +#! - ASSET_VALUE is the 4-element word representing the asset value. +pub proc load + # load asset value + padw dup.4 add.ASSET_VALUE_MEMORY_OFFSET mem_loadw_le + # => [ASSET_VALUE, ptr] + + # load asset key + padw movup.8 mem_loadw_le + # => [ASSET_KEY, ASSET_VALUE] +end + +#! Returns the balance of the given fungible asset and consumes it. #! -#! Note: Assumes that the given asset is fungible and does NOT validate it. +#! WARNING: Assumes that the given asset value is fungible and does NOT validate it. #! -#! Inputs: [ASSET] +#! Inputs: [ASSET_VALUE] #! Outputs: [balance] #! #! Where: -#! - ASSET is the fungible asset from which to extract the balance. +#! - ASSET_VALUE is the fungible asset from which to extract the balance. #! - balance is the amount of the fungible asset. 
-pub proc get_balance_from_fungible_asset - drop drop drop +pub proc fungible_value_into_amount + movdn.3 drop drop drop # => [balance] end + +#! Returns the balance of the given fungible asset. +#! +#! WARNING: Assumes that the given asset value is fungible and does NOT validate it. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [amount, ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - ASSET_VALUE is the fungible asset from which to extract the balance. +#! - amount is the amount of the fungible asset. +pub proc fungible_to_amount + # => [ASSET_KEY, [amount, 0, 0, 0]] + dup.4 + # => [amount, ASSET_KEY, ASSET_VALUE] +end + +#! Returns the faucet ID from an asset vault key. +#! +#! WARNING: The faucet ID is not validated. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [faucet_id_suffix, faucet_id_prefix, ASSET_KEY] +#! +#! Where: +#! - faucet_id is the account ID in the vault key. +#! - ASSET_KEY is the vault key from which to extract the faucet ID. +pub proc key_to_faucet_id + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix_and_metadata, faucet_id_prefix] + + dup.3 dup.3 + # => [faucet_id_suffix_and_metadata, faucet_id_prefix, ASSET_KEY] + + exec.split_suffix_and_metadata drop + # => [faucet_id_suffix, faucet_id_prefix, ASSET_KEY] +end + +#! Returns the faucet ID from an asset vault key and consumes it. +#! +#! WARNING: The faucet ID is not validated. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [faucet_id_suffix, faucet_id_prefix] +#! +#! Where: +#! - faucet_id is the account ID in the vault key. +#! - ASSET_KEY is the vault key from which to extract the faucet ID. +pub proc key_into_faucet_id + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix_and_metadata, faucet_id_prefix] + + drop drop + # => [faucet_id_suffix_and_metadata, faucet_id_prefix] + + exec.split_suffix_and_metadata drop + # => [faucet_id_suffix, faucet_id_prefix] +end + +#! Returns the asset ID from an asset vault key. +#! +#! Inputs: [ASSET_KEY] +#! 
Outputs: [asset_id_suffix, asset_id_prefix, ASSET_KEY] +#! +#! Where: +#! - asset_id is the asset ID in the vault key. +#! - ASSET_KEY is the vault key from which to extract the asset ID. +pub proc key_to_asset_id + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix, faucet_id_prefix] + + dup.1 dup.1 + # => [asset_id_suffix, asset_id_prefix, ASSET_KEY] +end + +#! Returns the asset ID from an asset vault key and consumes it. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [asset_id_suffix, asset_id_prefix] +#! +#! Where: +#! - asset_id is the asset ID in the vault key. +#! - ASSET_KEY is the vault key from which to extract the asset ID. +pub proc key_into_asset_id + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix, faucet_id_prefix] + + movup.2 drop movup.2 drop + # => [asset_id_suffix, asset_id_prefix] +end + +#! Returns the asset callbacks flag from an asset vault key. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [callbacks_enabled, ASSET_KEY] +#! +#! Where: +#! - ASSET_KEY is the vault key from which to extract the metadata. +#! - callbacks_enabled is 1 if callbacks are enabled and 0 if disabled. +pub proc key_to_callbacks_enabled + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix_and_metadata, faucet_id_prefix] + + dup.2 + # => [faucet_id_suffix_and_metadata, ASSET_KEY] + + exec.split_suffix_and_metadata swap drop + # => [asset_metadata, ASSET_KEY] + + exec.metadata_into_callbacks_enabled + # => [callbacks_enabled, ASSET_KEY] +end + +#! Creates a fungible asset vault key for the specified faucet. +#! +#! Inputs: [enable_callbacks, faucet_id_suffix, faucet_id_prefix] +#! Outputs: [ASSET_KEY] +#! +#! Where: +#! - enable_callbacks is a flag (0 or 1) indicating whether asset callbacks are enabled. +#! - faucet_id_{suffix,prefix} are the suffix and prefix felts of the fungible faucet. +#! - ASSET_KEY is the vault key for the fungible asset. +#! +#! Panics if: +#! - enable_callbacks is not 0 or 1. +#! +#! 
Invocation: exec +pub proc create_fungible_key + exec.create_metadata + # => [asset_metadata, faucet_id_suffix, faucet_id_prefix] + + # merge the asset metadata into the lower 8 bits of the suffix + # this is safe since create_metadata builds only valid metadata + add + # => [faucet_id_suffix_and_metadata, faucet_id_prefix] + + push.0.0 + # => [0, 0, faucet_id_suffix_and_metadata, faucet_id_prefix] + # => [ASSET_KEY] +end + +#! Creates a fungible asset for the specified fungible faucet and amount. +#! +#! WARNING: Does not validate the faucet ID or amount. +#! +#! Inputs: [enable_callbacks, faucet_id_suffix, faucet_id_prefix, amount] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - enable_callbacks is a flag (0 or 1) indicating whether asset callbacks are enabled. +#! - faucet_id_{suffix,prefix} are the suffix and prefix felts of the faucet to create the asset +#! for. +#! - amount is the amount of the asset to create. +#! - ASSET_KEY is the vault key of the created fungible asset +#! - ASSET_VALUE is the value of the created fungible asset. +#! +#! Panics if: +#! - enable_callbacks is not 0 or 1. +#! +#! Invocation: exec +pub proc create_fungible_asset_unchecked + # => [enable_callbacks, faucet_id_suffix, faucet_id_prefix, amount] + + # pad amount into ASSET_VALUE + repeat.3 push.0 movdn.4 end + # => [enable_callbacks, faucet_id_suffix, faucet_id_prefix, ASSET_VALUE] + + exec.create_fungible_key + # => [ASSET_KEY, ASSET_VALUE] +end + +#! Creates a non fungible asset for the specified non-fungible faucet. +#! +#! WARNING: Does not validate its inputs. +#! +#! Inputs: [enable_callbacks, faucet_id_suffix, faucet_id_prefix, DATA_HASH] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - enable_callbacks is a flag (0 or 1) indicating whether asset callbacks are enabled. +#! - faucet_id_{suffix,prefix} are the suffix and prefix felts of the faucet to create the asset +#! for. +#! - DATA_HASH is the data hash of the non-fungible asset to create. +#! 
- ASSET_KEY is the vault key of the created non-fungible asset. +#! - ASSET_VALUE is the value of the created non-fungible asset, which is identical to DATA_HASH. +#! +#! Panics if: +#! - enable_callbacks is not 0 or 1. +#! +#! Invocation: exec +pub proc create_non_fungible_asset_unchecked + exec.create_metadata + # => [asset_metadata, faucet_id_suffix, faucet_id_prefix, DATA_HASH] + + # merge the asset metadata into the lower 8 bits of the suffix + add + # => [faucet_id_suffix_and_metadata, faucet_id_prefix, DATA_HASH] + + # copy hashes at indices 0 and 1 in the data hash word to the corresponding index in the key + # word + dup.3 dup.3 + # => [hash0, hash1, faucet_id_suffix_and_metadata, faucet_id_prefix, DATA_HASH] + # => [ASSET_KEY, ASSET_VALUE] +end + +#! Splits the merged faucet ID suffix and the asset metadata. +#! +#! Inputs: [faucet_id_suffix_and_metadata] +#! Outputs: [asset_metadata, faucet_id_suffix] +#! +#! Where: +#! - faucet_id_suffix_and_metadata is the faucet ID suffix merged with the asset metadata. +#! - faucet_id_suffix is the suffix of the account ID. +#! - asset_metadata is the asset metadata. +pub proc split_suffix_and_metadata + u32split + # => [suffix_metadata_lo, suffix_metadata_hi] + + dup movdn.2 + # => [suffix_metadata_lo, suffix_metadata_hi, suffix_metadata_lo] + + # clear lower 8 bits of the lo part to get the actual ID suffix + u32and.0xffffff00 swap + # => [suffix_metadata_hi, suffix_metadata_lo', suffix_metadata_lo] + + # reassemble the ID suffix by multiplying the hi part with 2^32 and adding the lo part + mul.0x0100000000 add + # => [faucet_id_suffix, suffix_metadata_lo] + + # extract lower 8 bits of the lo part to get the metadata + swap u32and.0xff + # => [asset_metadata, faucet_id_suffix] +end + +#! Validates that asset metadata is well formed and consumes it. +#! +#! Inputs: [asset_metadata] +#! Outputs: [] +#! +#! Panics if: +#! - asset_metadata is not a valid u32 or exceeds CALLBACKS_ENABLED. 
+pub proc validate_metadata + u32assert.err=ERR_VAULT_INVALID_ENABLE_CALLBACKS + u32lte.CALLBACKS_ENABLED + assert.err=ERR_VAULT_INVALID_ENABLE_CALLBACKS + # => [] +end + +#! Creates asset metadata from the provided inputs. +#! +#! Inputs: [enable_callbacks] +#! Outputs: [asset_metadata] +#! +#! Where: +#! - enable_callbacks is a flag (0 or 1) indicating whether the asset callbacks flag should be set. +#! - asset_metadata is the asset metadata. +#! +#! Panics if: +#! - enable_callbacks is not 0 or 1. +proc create_metadata + # for now, enable_callbacks is identical to asset_metadata + dup exec.validate_metadata + # => [asset_metadata] +end + +#! Extracts the asset callback flag from asset metadata. +#! +#! WARNING: asset_metadata is assumed to be a byte (in particular a valid u32) +#! +#! Inputs: [asset_metadata] +#! Outputs: [callbacks_enabled] +#! +#! Where: +#! - asset_metadata is the asset metadata. +#! - callbacks_enabled is 1 if callbacks are enabled and 0 if disabled. +proc metadata_into_callbacks_enabled + # extract the least significant bit of the metadata + u32and.1 + # => [callbacks_enabled] +end diff --git a/crates/miden-protocol/build.rs b/crates/miden-protocol/build.rs index cb55eafb6a..93f45f8ac5 100644 --- a/crates/miden-protocol/build.rs +++ b/crates/miden-protocol/build.rs @@ -6,17 +6,13 @@ use std::sync::Arc; use fs_err as fs; use miden_assembly::diagnostics::{IntoDiagnostic, Result, WrapErr, miette}; use miden_assembly::{Assembler, DefaultSourceManager, KernelLibrary, Library}; +use miden_core::events::EventId; use regex::Regex; use walkdir::WalkDir; // CONSTANTS // ================================================================================================ -/// Defines whether the build script should generate files in `/src`. -/// The docs.rs build pipeline has a read-only filesystem, so we have to avoid writing to `src`, -/// otherwise the docs will fail to build there. Note that writing to `OUT_DIR` is fine. 
-const BUILD_GENERATED_FILES_IN_SRC: bool = option_env!("BUILD_GENERATED_FILES_IN_SRC").is_some(); - const ASSETS_DIR: &str = "assets"; const ASM_DIR: &str = "asm"; const ASM_PROTOCOL_DIR: &str = "protocol"; @@ -24,12 +20,12 @@ const ASM_PROTOCOL_DIR: &str = "protocol"; const SHARED_UTILS_DIR: &str = "shared_utils"; const SHARED_MODULES_DIR: &str = "shared_modules"; const ASM_TX_KERNEL_DIR: &str = "kernels/transaction"; -const KERNEL_PROCEDURES_RS_FILE: &str = "src/transaction/kernel/procedures.rs"; const PROTOCOL_LIB_NAMESPACE: &str = "miden::protocol"; -const TX_KERNEL_ERRORS_FILE: &str = "src/errors/tx_kernel.rs"; -const PROTOCOL_LIB_ERRORS_FILE: &str = "src/errors/protocol.rs"; +const KERNEL_PROCEDURES_RS_FILE: &str = "procedures.rs"; +const TX_KERNEL_ERRORS_RS_FILE: &str = "tx_kernel_errors.rs"; +const PROTOCOL_LIB_ERRORS_RS_FILE: &str = "protocol_errors.rs"; const TX_KERNEL_ERRORS_ARRAY_NAME: &str = "TX_KERNEL_ERRORS"; const PROTOCOL_LIB_ERRORS_ARRAY_NAME: &str = "PROTOCOL_LIB_ERRORS"; @@ -61,7 +57,6 @@ const TX_KERNEL_ERROR_CATEGORIES: [&str; 14] = [ fn main() -> Result<()> { // re-build when the MASM code changes println!("cargo::rerun-if-changed={ASM_DIR}/"); - println!("cargo::rerun-if-env-changed=BUILD_GENERATED_FILES_IN_SRC"); // Copies the MASM code to the build directory let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); @@ -80,14 +75,17 @@ fn main() -> Result<()> { let target_dir = Path::new(&build_dir).join(ASSETS_DIR); // compile transaction kernel - let mut assembler = - compile_tx_kernel(&source_dir.join(ASM_TX_KERNEL_DIR), &target_dir.join("kernels"))?; + let mut assembler = compile_tx_kernel( + &source_dir.join(ASM_TX_KERNEL_DIR), + &target_dir.join("kernels"), + &build_dir, + )?; // compile protocol library let protocol_lib = compile_protocol_lib(&source_dir, &target_dir, assembler.clone())?; assembler.link_dynamic_library(protocol_lib)?; - generate_error_constants(&source_dir)?; + generate_error_constants(&source_dir, &build_dir)?; 
generate_event_constants(&source_dir, &target_dir)?; @@ -118,7 +116,7 @@ fn main() -> Result<()> { /// - {target_dir}/tx_script_main.masb -> contains the executable compiled from /// tx_script_main.masm. /// - src/transaction/procedures/kernel_v0.rs -> contains the kernel procedures table. -fn compile_tx_kernel(source_dir: &Path, target_dir: &Path) -> Result { +fn compile_tx_kernel(source_dir: &Path, target_dir: &Path, build_dir: &str) -> Result { let shared_utils_path = std::path::Path::new(ASM_DIR).join(SHARED_UTILS_DIR); let kernel_path = miden_assembly::Path::kernel_path(); @@ -131,7 +129,7 @@ fn compile_tx_kernel(source_dir: &Path, target_dir: &Path) -> Result .assemble_kernel_from_dir(source_dir.join("api.masm"), Some(source_dir.join("lib")))?; // generate kernel `procedures.rs` file - generate_kernel_proc_hash_file(kernel_lib.clone())?; + generate_kernel_proc_hash_file(kernel_lib.clone(), build_dir)?; let output_file = target_dir.join("tx_kernel").with_extension(Library::LIBRARY_EXTENSION); kernel_lib.write_to_file(output_file).into_diagnostic()?; @@ -192,14 +190,10 @@ fn compile_tx_script_main( tx_script_main.write_to_file(masb_file_path).into_diagnostic() } -/// Generates kernel `procedures.rs` file based on the kernel library -fn generate_kernel_proc_hash_file(kernel: KernelLibrary) -> Result<()> { - // Because the kernel Rust file will be stored under ./src, this should be a no-op if we can't - // write there - if !BUILD_GENERATED_FILES_IN_SRC { - return Ok(()); - } - +/// Generates kernel `procedures.rs` file based on the kernel library. +/// +/// The file is written to `{build_dir}/procedures.rs` and included via `include!` in the source. 
+fn generate_kernel_proc_hash_file(kernel: KernelLibrary, build_dir: &str) -> Result<()> { let (_, module_info, _) = kernel.into_parts(); let to_exclude = BTreeSet::from_iter(["exec_kernel_proc"]); @@ -230,8 +224,9 @@ fn generate_kernel_proc_hash_file(kernel: KernelLibrary) -> Result<()> { txt }).collect::>().join("\n"); - shared::write_if_changed( - KERNEL_PROCEDURES_RS_FILE, + let output_path = Path::new(build_dir).join(KERNEL_PROCEDURES_RS_FILE); + fs::write( + output_path, format!( r#"// This file is generated by build.rs, do not modify @@ -247,6 +242,7 @@ pub const KERNEL_PROCEDURES: [Word; {proc_count}] = [ "#, ), ) + .into_diagnostic() } fn parse_proc_offsets(filename: impl AsRef) -> Result> { @@ -299,6 +295,7 @@ fn build_assembler(kernel: Option) -> Result { kernel .map(|kernel| Assembler::with_kernel(Arc::new(DefaultSourceManager::default()), kernel)) .unwrap_or_default() + .with_warnings_as_errors(true) .with_dynamic_library(miden_core_lib::CoreLibrary::default()) } @@ -352,25 +349,29 @@ fn copy_shared_modules>(source_dir: T) -> Result<()> { /// The function ensures that a constant is not defined twice, except if their error message is /// the same. This can happen across multiple files. /// -/// Because the error files will be written to ./src/errors, this should be a no-op if ./src is -/// read-only. To enable writing to ./src, set the `BUILD_GENERATED_FILES_IN_SRC` environment -/// variable. -fn generate_error_constants(asm_source_dir: &Path) -> Result<()> { - if !BUILD_GENERATED_FILES_IN_SRC { - return Ok(()); - } +/// The generated files are written to `build_dir` (i.e. `OUT_DIR`) and included via `include!` +/// in the source. +fn generate_error_constants(asm_source_dir: &Path, build_dir: &str) -> Result<()> { + // Shared utils errors + // For now these are duplicated in the tx kernel and protocol error module. 
+ // ------------------------------------------ + + let shared_utils_dir = asm_source_dir.join(SHARED_UTILS_DIR); + let shared_utils_errors = shared::extract_all_masm_errors(&shared_utils_dir) + .context("failed to extract all masm errors")?; // Transaction kernel errors // ------------------------------------------ let tx_kernel_dir = asm_source_dir.join(ASM_TX_KERNEL_DIR); - let errors = shared::extract_all_masm_errors(&tx_kernel_dir) + let mut errors = shared::extract_all_masm_errors(&tx_kernel_dir) .context("failed to extract all masm errors")?; + errors.extend_from_slice(&shared_utils_errors); validate_tx_kernel_category(&errors)?; shared::generate_error_file( shared::ErrorModule { - file_name: TX_KERNEL_ERRORS_FILE, + file_path: Path::new(build_dir).join(TX_KERNEL_ERRORS_RS_FILE), array_name: TX_KERNEL_ERRORS_ARRAY_NAME, is_crate_local: true, }, @@ -381,12 +382,13 @@ fn generate_error_constants(asm_source_dir: &Path) -> Result<()> { // ------------------------------------------ let protocol_dir = asm_source_dir.join(ASM_PROTOCOL_DIR); - let errors = shared::extract_all_masm_errors(&protocol_dir) + let mut errors = shared::extract_all_masm_errors(&protocol_dir) .context("failed to extract all masm errors")?; + errors.extend(shared_utils_errors); shared::generate_error_file( shared::ErrorModule { - file_name: PROTOCOL_LIB_ERRORS_FILE, + file_path: Path::new(build_dir).join(PROTOCOL_LIB_ERRORS_RS_FILE), array_name: PROTOCOL_LIB_ERRORS_ARRAY_NAME, is_crate_local: true, }, @@ -511,36 +513,15 @@ fn generate_event_file_content( // want to error out as early as possible: // TODO: make the error out at build-time to be able to present better error hints for (event_path, event_name) in events { - let value = miden_core::EventId::from_name(event_path).as_felt().as_int(); + let value = EventId::from_name(event_path).as_felt().as_canonical_u64(); debug_assert!(!event_name.is_empty()); - writeln!(&mut output, "const {}: u64 = {};", event_name, value)?; - } - - { - 
writeln!(&mut output)?; - - writeln!(&mut output)?; - - writeln!( - &mut output, - r###" -use alloc::collections::BTreeMap; - -pub(crate) static EVENT_NAME_LUT: ::miden_utils_sync::LazyLock> = - ::miden_utils_sync::LazyLock::new(|| {{ - BTreeMap::from_iter([ -"### - )?; - - for (event_path, const_name) in events { - writeln!(&mut output, " ({}, \"{}\"),", const_name, event_path)?; - } - + writeln!(&mut output, "const {}_ID: u64 = {};", event_name, value)?; writeln!( &mut output, - r###" ]) -}});"### + "static {}_NAME: ::miden_core::events::EventName = ::miden_core::events::EventName::new(\"{}\");", + event_name, event_path )?; + writeln!(&mut output)?; } Ok(output) @@ -736,7 +717,7 @@ mod shared { } /// Generates the content of an error file for the given category and the set of errors and - /// writes it to the category's file. + /// writes it to the file at the path specified in the module. pub fn generate_error_file(module: ErrorModule, errors: Vec) -> Result<()> { let mut output = String::new(); @@ -783,26 +764,11 @@ mod shared { .into_diagnostic()?; } - write_if_changed(module.file_name, output)?; + fs::write(module.file_path, output).into_diagnostic()?; Ok(()) } - /// Writes `contents` to `path` only if the file doesn't exist or its current contents - /// differ. This avoids updating the file's mtime when nothing changed, which prevents - /// cargo from treating the crate as dirty on the next build. 
- pub fn write_if_changed(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result<()> { - let path = path.as_ref(); - let new_contents = contents.as_ref(); - if path.exists() { - let existing = std::fs::read(path).into_diagnostic()?; - if existing == new_contents { - return Ok(()); - } - } - std::fs::write(path, new_contents).into_diagnostic() - } - pub type ErrorName = String; #[derive(Debug, Clone)] @@ -816,9 +782,9 @@ mod shared { pub message: String, } - #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] + #[derive(Debug, Clone)] pub struct ErrorModule { - pub file_name: &'static str, + pub file_path: PathBuf, pub array_name: &'static str, pub is_crate_local: bool, } diff --git a/crates/miden-protocol/masm_doc_comment_fmt.md b/crates/miden-protocol/masm_doc_comment_fmt.md index dbf2eba095..b46daa8d1c 100644 --- a/crates/miden-protocol/masm_doc_comment_fmt.md +++ b/crates/miden-protocol/masm_doc_comment_fmt.md @@ -85,10 +85,17 @@ Example: Each variable could represent a single value or a sequence of four values (a Word). Variable representing a single value should be written in lowercase, and a variable for the word should be written in uppercase. +For multi-element values that are not exactly one word (4 felts), append `(N)` to indicate the count: + +- `value` is a single felt. +- `value(N)` are N felts (where N is not 4). +- `VALUE` is a word (4 felts). No `(4)` suffix is needed since uppercase already implies a word. + Example: ```masm #! Inputs: [single_value, SOME_WORD] +#! Inputs: [dest_address(5), amount_u256(8), pad(2)] ``` Variable, which represents a memory address, should have a `_ptr` suffix in its name. For example, `note_script_commitment_ptr`. 
diff --git a/crates/miden-protocol/src/account/account_id/account_type.rs b/crates/miden-protocol/src/account/account_id/account_type.rs index e02f9e7cef..1ea4c02f98 100644 --- a/crates/miden-protocol/src/account/account_id/account_type.rs +++ b/crates/miden-protocol/src/account/account_id/account_type.rs @@ -2,7 +2,13 @@ use core::fmt; use core::str::FromStr; use crate::errors::AccountIdError; -use crate::utils::serde::{ByteReader, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // ACCOUNT TYPE // ================================================================================================ @@ -23,6 +29,24 @@ pub enum AccountType { } impl AccountType { + /// Returns all account types. + pub fn all() -> [AccountType; 4] { + [ + AccountType::FungibleFaucet, + AccountType::NonFungibleFaucet, + AccountType::RegularAccountImmutableCode, + AccountType::RegularAccountUpdatableCode, + ] + } + + /// Returns the regular account types (immutable and updatable code). + pub fn regular() -> [AccountType; 2] { + [ + AccountType::RegularAccountImmutableCode, + AccountType::RegularAccountUpdatableCode, + ] + } + /// Returns `true` if the account is a faucet. 
pub fn is_faucet(&self) -> bool { matches!(self, Self::FungibleFaucet | Self::NonFungibleFaucet) @@ -62,7 +86,7 @@ impl rand::distr::Distribution for rand::distr::StandardUniform { // ================================================================================================ impl Serializable for AccountType { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { target.write_u8(*self as u8); } } diff --git a/crates/miden-protocol/src/account/account_id/id_prefix.rs b/crates/miden-protocol/src/account/account_id/id_prefix.rs index 46207bcc85..63669c59ee 100644 --- a/crates/miden-protocol/src/account/account_id/id_prefix.rs +++ b/crates/miden-protocol/src/account/account_id/id_prefix.rs @@ -4,7 +4,7 @@ use core::fmt; use super::v0; use crate::Felt; use crate::account::account_id::AccountIdPrefixV0; -use crate::account::{AccountIdV0, AccountIdVersion, AccountStorageMode, AccountType}; +use crate::account::{AccountIdVersion, AccountStorageMode, AccountType}; use crate::errors::AccountIdError; use crate::utils::serde::{ ByteReader, @@ -57,7 +57,7 @@ impl AccountIdPrefix { pub fn new_unchecked(prefix: Felt) -> Self { // The prefix contains the metadata. // If we add more versions in the future, we may need to generalize this. - match v0::extract_version(prefix.as_int()) + match v0::extract_version(prefix.as_canonical_u64()) .expect("prefix should contain a valid account ID version") { AccountIdVersion::Version0 => Self::V0(AccountIdPrefixV0::new_unchecked(prefix)), @@ -73,7 +73,7 @@ impl AccountIdPrefix { pub fn new(prefix: Felt) -> Result { // The prefix contains the metadata. // If we add more versions in the future, we may need to generalize this. - match v0::extract_version(prefix.as_int())? { + match v0::extract_version(prefix.as_canonical_u64())? { AccountIdVersion::Version0 => AccountIdPrefixV0::new(prefix).map(Self::V0), } } @@ -89,14 +89,14 @@ impl AccountIdPrefix { } /// Returns the prefix as a [`u64`]. 
- pub const fn as_u64(&self) -> u64 { + pub fn as_u64(&self) -> u64 { match self { AccountIdPrefix::V0(id_prefix) => id_prefix.as_u64(), } } /// Returns the type of this account ID. - pub const fn account_type(&self) -> AccountType { + pub fn account_type(&self) -> AccountType { match self { AccountIdPrefix::V0(id_prefix) => id_prefix.account_type(), } @@ -153,20 +153,6 @@ impl AccountIdPrefix { AccountIdPrefix::V0(id_prefix) => id_prefix.to_hex(), } } - - /// Returns `felt` with the fungible bit set to zero. The version must be passed as the location - /// of the fungible bit may depend on the underlying account ID version. - pub(crate) fn clear_fungible_bit(version: AccountIdVersion, felt: Felt) -> Felt { - match version { - AccountIdVersion::Version0 => { - // Set the fungible bit to zero by taking the bitwise `and` of the felt with the - // inverted is_faucet mask. - let clear_fungible_bit_mask = !AccountIdV0::IS_FAUCET_MASK; - Felt::try_from(felt.as_int() & clear_fungible_bit_mask) - .expect("felt should still be valid as we cleared a bit and did not set any") - }, - } - } } // CONVERSIONS FROM ACCOUNT ID PREFIX @@ -237,8 +223,11 @@ impl TryFrom for AccountIdPrefix { /// Returns an error if any of the ID constraints are not met. See the [constraints /// documentation](super::AccountId#constraints) for details. 
fn try_from(value: u64) -> Result { - let element = Felt::try_from(value.to_le_bytes().as_slice()) - .map_err(AccountIdError::AccountIdInvalidPrefixFieldElement)?; + let element = Felt::try_from(value).map_err(|err| { + AccountIdError::AccountIdInvalidPrefixFieldElement(DeserializationError::InvalidValue( + err.to_string(), + )) + })?; Self::new(element) } } diff --git a/crates/miden-protocol/src/account/account_id/mod.rs b/crates/miden-protocol/src/account/account_id/mod.rs index 03a575fbf8..0b0c9c137a 100644 --- a/crates/miden-protocol/src/account/account_id/mod.rs +++ b/crates/miden-protocol/src/account/account_id/mod.rs @@ -19,13 +19,18 @@ use core::fmt; use bech32::primitives::decode::ByteIter; pub use id_version::AccountIdVersion; use miden_core::Felt; -use miden_core::utils::{ByteReader, Deserializable, Serializable}; use miden_crypto::utils::hex_to_bytes; -use miden_processor::DeserializationError; use crate::Word; use crate::address::NetworkId; use crate::errors::{AccountError, AccountIdError}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// The identifier of an [`Account`](crate::account::Account). /// @@ -142,13 +147,29 @@ impl AccountId { pub fn new_unchecked(elements: [Felt; 2]) -> Self { // The prefix contains the metadata. // If we add more versions in the future, we may need to generalize this. - match v0::extract_version(elements[0].as_int()) + match v0::extract_version(elements[0].as_canonical_u64()) .expect("prefix should contain a valid account ID version") { AccountIdVersion::Version0 => Self::V0(AccountIdV0::new_unchecked(elements)), } } + /// Decodes an [`AccountId`] from the provided suffix and prefix felts. + /// + /// # Errors + /// + /// Returns an error if any of the ID constraints are not met. See the [constraints + /// documentation](AccountId#constraints) for details. 
+ pub fn try_from_elements(suffix: Felt, prefix: Felt) -> Result { + // The prefix contains the metadata. + // If we add more versions in the future, we may need to generalize this. + match v0::extract_version(prefix.as_canonical_u64())? { + AccountIdVersion::Version0 => { + AccountIdV0::try_from_elements(suffix, prefix).map(Self::V0) + }, + } + } + /// Constructs an [`AccountId`] for testing purposes with the given account type, storage /// mode. /// @@ -207,7 +228,7 @@ impl AccountId { // -------------------------------------------------------------------------------------------- /// Returns the type of this account ID. - pub const fn account_type(&self) -> AccountType { + pub fn account_type(&self) -> AccountType { match self { AccountId::V0(account_id) => account_id.account_type(), } @@ -398,25 +419,6 @@ impl From for AccountId { } } -impl TryFrom<[Felt; 2]> for AccountId { - type Error = AccountIdError; - - /// Returns an [`AccountId`] instantiated with the provided field elements where `elements[0]` - /// is taken as the prefix and `elements[1]` is taken as the suffix. - /// - /// # Errors - /// - /// Returns an error if any of the ID constraints are not met. See the [constraints - /// documentation](AccountId#constraints) for details. - fn try_from(elements: [Felt; 2]) -> Result { - // The prefix contains the metadata. - // If we add more versions in the future, we may need to generalize this. - match v0::extract_version(elements[0].as_int())? 
{ - AccountIdVersion::Version0 => AccountIdV0::try_from(elements).map(Self::V0), - } - } -} - impl TryFrom<[u8; 15]> for AccountId { type Error = AccountIdError; @@ -481,7 +483,7 @@ impl fmt::Display for AccountId { // ================================================================================================ impl Serializable for AccountId { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { match self { AccountId::V0(account_id) => { account_id.write_into(target); diff --git a/crates/miden-protocol/src/account/account_id/seed.rs b/crates/miden-protocol/src/account/account_id/seed.rs index 8ad1be02a7..ba3f285bd8 100644 --- a/crates/miden-protocol/src/account/account_id/seed.rs +++ b/crates/miden-protocol/src/account/account_id/seed.rs @@ -2,7 +2,7 @@ use alloc::vec::Vec; use crate::account::account_id::AccountIdVersion; use crate::account::account_id::v0::{compute_digest, validate_prefix}; -use crate::account::{AccountStorageMode, AccountType}; +use crate::account::{AccountIdV0, AccountStorageMode, AccountType}; use crate::errors::AccountError; use crate::{Felt, Word}; @@ -52,9 +52,9 @@ fn compute_account_seed_single( loop { // Check if the seed satisfies the specified type, storage mode and version. Additionally, // the most significant bit of the suffix must be zero to ensure felt validity. 
- let prefix = current_digest.as_elements()[0]; - let suffix = current_digest.as_elements()[1]; - let is_suffix_msb_zero = suffix.as_int() >> 63 == 0; + let suffix = current_digest[AccountIdV0::SEED_DIGEST_SUFFIX_ELEMENT_IDX]; + let prefix = current_digest[AccountIdV0::SEED_DIGEST_PREFIX_ELEMENT_IDX]; + let is_suffix_msb_zero = suffix.as_canonical_u64() >> 63 == 0; if let Ok((computed_account_type, computed_storage_mode, computed_version)) = validate_prefix(prefix) diff --git a/crates/miden-protocol/src/account/account_id/v0/mod.rs b/crates/miden-protocol/src/account/account_id/v0/mod.rs index 34ad3ebbb9..03fb3bc5a0 100644 --- a/crates/miden-protocol/src/account/account_id/v0/mod.rs +++ b/crates/miden-protocol/src/account/account_id/v0/mod.rs @@ -20,7 +20,13 @@ use crate::account::account_id::storage_mode::{NETWORK, PRIVATE, PUBLIC}; use crate::account::{AccountIdVersion, AccountStorageMode, AccountType}; use crate::address::AddressType; use crate::errors::{AccountError, AccountIdError, Bech32Error}; -use crate::utils::{ByteReader, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{EMPTY_WORD, Felt, Hasher, Word}; // ACCOUNT ID VERSION 0 @@ -31,14 +37,14 @@ use crate::{EMPTY_WORD, Felt, Hasher, Word}; /// See the [`AccountId`](super::AccountId) type's documentation for details. 
#[derive(Debug, Copy, Clone, Eq, PartialEq)] pub struct AccountIdV0 { - prefix: Felt, suffix: Felt, + prefix: Felt, } impl Hash for AccountIdV0 { fn hash(&self, state: &mut H) { - self.prefix.inner().hash(state); - self.suffix.inner().hash(state); + self.prefix.as_canonical_u64().hash(state); + self.suffix.as_canonical_u64().hash(state); } } @@ -61,8 +67,11 @@ impl AccountIdV0 { pub(crate) const STORAGE_MODE_MASK: u8 = 0b11 << Self::STORAGE_MODE_SHIFT; pub(crate) const STORAGE_MODE_SHIFT: u64 = 6; - /// The bit at index 5 of the prefix encodes whether the account is a faucet. - pub(crate) const IS_FAUCET_MASK: u64 = 0b10 << Self::TYPE_SHIFT; + /// The element index in the seed digest that becomes the account ID suffix (after + /// [`shape_suffix`]). + pub(crate) const SEED_DIGEST_SUFFIX_ELEMENT_IDX: usize = 0; + /// The element index in the seed digest that becomes the account ID prefix. + pub(crate) const SEED_DIGEST_PREFIX_ELEMENT_IDX: usize = 1; // CONSTRUCTORS // -------------------------------------------------------------------------------------------- @@ -75,13 +84,14 @@ impl AccountIdV0 { ) -> Result { let seed_digest = compute_digest(seed, code_commitment, storage_commitment); - let mut felts: [Felt; 2] = seed_digest.as_elements()[0..2] - .try_into() - .expect("we should have sliced off 2 elements"); + // Use the first half-word of the seed digest as the account ID, where the prefix is the + // most significant element. + let mut suffix = seed_digest[Self::SEED_DIGEST_SUFFIX_ELEMENT_IDX]; + let prefix = seed_digest[Self::SEED_DIGEST_PREFIX_ELEMENT_IDX]; - felts[1] = shape_suffix(felts[1]); + suffix = shape_suffix(suffix); - account_id_from_felts(felts) + Self::try_from_elements(suffix, prefix) } /// See [`AccountId::new_unchecked`](super::AccountId::new_unchecked) for details. @@ -98,6 +108,14 @@ impl AccountIdV0 { Self { prefix, suffix } } + /// See [`AccountId::try_from_elements`](super::AccountId::try_from_elements) for details. 
+ pub fn try_from_elements(suffix: Felt, prefix: Felt) -> Result { + validate_suffix(suffix)?; + validate_prefix(prefix)?; + + Ok(AccountIdV0 { suffix, prefix }) + } + /// See [`AccountId::dummy`](super::AccountId::dummy) for details. #[cfg(any(feature = "testing", test))] pub fn dummy( @@ -130,12 +148,12 @@ impl AccountIdV0 { let mut suffix = Felt::new(u64::from_be_bytes(suffix_bytes)); // Clear the most significant bit of the suffix. - suffix = Felt::try_from(suffix.as_int() & 0x7fff_ffff_ffff_ffff) + suffix = Felt::try_from(suffix.as_canonical_u64() & 0x7fff_ffff_ffff_ffff) .expect("no bits were set so felt should still be valid"); suffix = shape_suffix(suffix); - let account_id = account_id_from_felts([prefix, suffix]) + let account_id = Self::try_from_elements(suffix, prefix) .expect("we should have shaped the felts to produce a valid id"); debug_assert_eq!(account_id.account_type(), account_type); @@ -167,8 +185,8 @@ impl AccountIdV0 { // -------------------------------------------------------------------------------------------- /// See [`AccountId::account_type`](super::AccountId::account_type) for details. - pub const fn account_type(&self) -> AccountType { - extract_type(self.prefix.as_int()) + pub fn account_type(&self) -> AccountType { + extract_type(self.prefix.as_canonical_u64()) } /// See [`AccountId::is_faucet`](super::AccountId::is_faucet) for details. @@ -211,7 +229,7 @@ impl AccountIdV0 { // big-endian hex string. Only then can we cut off the last zero byte by truncating. We // cannot use `:014x` padding. let mut hex_string = - format!("0x{:016x}{:016x}", self.prefix().as_u64(), self.suffix().as_int()); + format!("0x{:016x}{:016x}", self.prefix().as_u64(), self.suffix().as_canonical_u64()); hex_string.truncate(32); hex_string } @@ -322,7 +340,7 @@ impl From for [u8; 15] { let mut result = [0_u8; 15]; result[..8].copy_from_slice(&id.prefix().as_u64().to_be_bytes()); // The last byte of the suffix is always zero so we skip it here. 
- result[8..].copy_from_slice(&id.suffix().as_int().to_be_bytes()[..7]); + result[8..].copy_from_slice(&id.suffix().as_canonical_u64().to_be_bytes()[..7]); result } } @@ -330,7 +348,7 @@ impl From for [u8; 15] { impl From for u128 { fn from(id: AccountIdV0) -> Self { let mut le_bytes = [0_u8; 16]; - le_bytes[..8].copy_from_slice(&id.suffix().as_int().to_le_bytes()); + le_bytes[..8].copy_from_slice(&id.suffix().as_canonical_u64().to_le_bytes()); le_bytes[8..].copy_from_slice(&id.prefix().as_u64().to_le_bytes()); u128::from_le_bytes(le_bytes) } @@ -339,16 +357,6 @@ impl From for u128 { // CONVERSIONS TO ACCOUNT ID // ================================================================================================ -impl TryFrom<[Felt; 2]> for AccountIdV0 { - type Error = AccountIdError; - - /// See [`TryFrom<[Felt; 2]> for - /// AccountId`](super::AccountId#impl-TryFrom<%5BFelt;+2%5D>-for-AccountId) for details. - fn try_from(elements: [Felt; 2]) -> Result { - account_id_from_felts(elements) - } -} - impl TryFrom<[u8; 15]> for AccountIdV0 { type Error = AccountIdError; @@ -369,13 +377,22 @@ impl TryFrom<[u8; 15]> for AccountIdV0 { let mut suffix_bytes = [0; 8]; suffix_bytes[1..8].copy_from_slice(suffix_slice); - let prefix = Felt::try_from(prefix_slice) - .map_err(AccountIdError::AccountIdInvalidPrefixFieldElement)?; + let prefix = Felt::try_from(u64::from_le_bytes( + prefix_slice.try_into().expect("prefix slice should be 8 bytes"), + )) + .map_err(|err| { + AccountIdError::AccountIdInvalidPrefixFieldElement(DeserializationError::InvalidValue( + err.to_string(), + )) + })?; - let suffix = Felt::try_from(suffix_bytes.as_slice()) - .map_err(AccountIdError::AccountIdInvalidSuffixFieldElement)?; + let suffix = Felt::try_from(u64::from_le_bytes(suffix_bytes)).map_err(|err| { + AccountIdError::AccountIdInvalidSuffixFieldElement(DeserializationError::InvalidValue( + err.to_string(), + )) + })?; - Self::try_from([prefix, suffix]) + Self::try_from_elements(suffix, prefix) } } 
@@ -396,7 +413,7 @@ impl TryFrom for AccountIdV0 { // ================================================================================================ impl Serializable for AccountIdV0 { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { let bytes: [u8; 15] = (*self).into(); bytes.write_into(target); } @@ -417,25 +434,12 @@ impl Deserializable for AccountIdV0 { // HELPER FUNCTIONS // ================================================================================================ -/// Returns an [AccountId] instantiated with the provided field elements. -/// -/// # Errors -/// -/// Returns an error if any of the ID constraints are not met. See the [constraints -/// documentation](AccountId#constraints) for details. -fn account_id_from_felts(elements: [Felt; 2]) -> Result { - validate_prefix(elements[0])?; - validate_suffix(elements[1])?; - - Ok(AccountIdV0 { prefix: elements[0], suffix: elements[1] }) -} - /// Checks that the prefix: /// - has known values for metadata (storage mode, type and version). pub(crate) fn validate_prefix( prefix: Felt, ) -> Result<(AccountType, AccountStorageMode, AccountIdVersion), AccountIdError> { - let prefix = prefix.as_int(); + let prefix = prefix.as_canonical_u64(); // Validate storage bits. let storage_mode = extract_storage_mode(prefix)?; @@ -451,8 +455,8 @@ pub(crate) fn validate_prefix( /// Checks that the suffix: /// - has its most significant bit set to zero. /// - has its lower 8 bits set to zero. -const fn validate_suffix(suffix: Felt) -> Result<(), AccountIdError> { - let suffix = suffix.as_int(); +fn validate_suffix(suffix: Felt) -> Result<(), AccountIdError> { + let suffix = suffix.as_canonical_u64(); // Validate most significant bit is zero. if suffix >> 63 != 0 { @@ -503,7 +507,7 @@ pub(crate) const fn extract_type(prefix: u64) -> AccountType { /// Shapes the suffix so it meets the requirements of the account ID, by setting the lower 8 bits to /// zero. 
fn shape_suffix(suffix: Felt) -> Felt { - let mut suffix = suffix.as_int(); + let mut suffix = suffix.as_canonical_u64(); // Clear the lower 8 bits. suffix &= 0xffff_ffff_ffff_ff00; diff --git a/crates/miden-protocol/src/account/account_id/v0/prefix.rs b/crates/miden-protocol/src/account/account_id/v0/prefix.rs index d2b0f6d116..26a5ffb325 100644 --- a/crates/miden-protocol/src/account/account_id/v0/prefix.rs +++ b/crates/miden-protocol/src/account/account_id/v0/prefix.rs @@ -3,12 +3,17 @@ use core::fmt; use core::hash::Hash; use miden_core::Felt; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; use crate::account::account_id::v0::{self, validate_prefix}; use crate::account::{AccountIdVersion, AccountStorageMode, AccountType}; use crate::errors::AccountIdError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // ACCOUNT ID PREFIX VERSION 0 // ================================================================================================ @@ -23,7 +28,7 @@ pub struct AccountIdPrefixV0 { impl Hash for AccountIdPrefixV0 { fn hash(&self, state: &mut H) { - self.prefix.inner().hash(state); + self.prefix.as_canonical_u64().hash(state); } } @@ -65,14 +70,14 @@ impl AccountIdPrefixV0 { } /// See [`AccountIdPrefix::as_u64`](crate::account::AccountIdPrefix::as_u64) for details. - pub const fn as_u64(&self) -> u64 { - self.prefix.as_int() + pub fn as_u64(&self) -> u64 { + self.prefix.as_canonical_u64() } /// See [`AccountIdPrefix::account_type`](crate::account::AccountIdPrefix::account_type) for /// details. - pub const fn account_type(&self) -> AccountType { - v0::extract_type(self.prefix.as_int()) + pub fn account_type(&self) -> AccountType { + v0::extract_type(self.prefix.as_canonical_u64()) } /// See [`AccountIdPrefix::is_faucet`](crate::account::AccountIdPrefix::is_faucet) for details. 
@@ -89,7 +94,7 @@ impl AccountIdPrefixV0 { /// See [`AccountIdPrefix::storage_mode`](crate::account::AccountIdPrefix::storage_mode) for /// details. pub fn storage_mode(&self) -> AccountStorageMode { - v0::extract_storage_mode(self.prefix.as_int()) + v0::extract_storage_mode(self.prefix.as_canonical_u64()) .expect("account ID prefix should have been constructed with a valid storage mode") } @@ -100,13 +105,13 @@ impl AccountIdPrefixV0 { /// See [`AccountIdPrefix::version`](crate::account::AccountIdPrefix::version) for details. pub fn version(&self) -> AccountIdVersion { - v0::extract_version(self.prefix.as_int()) + v0::extract_version(self.prefix.as_canonical_u64()) .expect("account ID prefix should have been constructed with a valid version") } /// See [`AccountIdPrefix::to_hex`](crate::account::AccountIdPrefix::to_hex) for details. pub fn to_hex(self) -> String { - format!("0x{:016x}", self.prefix.as_int()) + format!("0x{:016x}", self.prefix.as_canonical_u64()) } } @@ -122,14 +127,14 @@ impl From for Felt { impl From for [u8; 8] { fn from(id: AccountIdPrefixV0) -> Self { let mut result = [0_u8; 8]; - result[..8].copy_from_slice(&id.prefix.as_int().to_be_bytes()); + result[..8].copy_from_slice(&id.prefix.as_canonical_u64().to_be_bytes()); result } } impl From for u64 { fn from(id: AccountIdPrefixV0) -> Self { - id.prefix.as_int() + id.prefix.as_canonical_u64() } } @@ -143,11 +148,16 @@ impl TryFrom<[u8; 8]> for AccountIdPrefixV0 { /// AccountIdPrefix`](crate::account::AccountIdPrefix#impl-TryFrom<%5Bu8;+8% /// 5D>-for-AccountIdPrefix) for details. fn try_from(mut value: [u8; 8]) -> Result { - // Felt::try_from expects little-endian order. + // Reverse to little-endian order. 
value.reverse(); - Felt::try_from(value.as_slice()) - .map_err(AccountIdError::AccountIdInvalidPrefixFieldElement) + let num = u64::from_le_bytes(value); + Felt::try_from(num) + .map_err(|err| { + AccountIdError::AccountIdInvalidPrefixFieldElement( + DeserializationError::InvalidValue(err.to_string()), + ) + }) .and_then(Self::new) } } @@ -159,8 +169,11 @@ impl TryFrom for AccountIdPrefixV0 { /// AccountIdPrefix`](crate::account::AccountIdPrefix#impl-TryFrom-for-AccountIdPrefix) /// for details. fn try_from(value: u64) -> Result { - let element = Felt::try_from(value.to_le_bytes().as_slice()) - .map_err(AccountIdError::AccountIdInvalidPrefixFieldElement)?; + let element = Felt::try_from(value).map_err(|err| { + AccountIdError::AccountIdInvalidPrefixFieldElement(DeserializationError::InvalidValue( + err.to_string(), + )) + })?; Self::new(element) } } @@ -187,7 +200,7 @@ impl PartialOrd for AccountIdPrefixV0 { impl Ord for AccountIdPrefixV0 { fn cmp(&self, other: &Self) -> core::cmp::Ordering { - self.prefix.as_int().cmp(&other.prefix.as_int()) + self.prefix.as_canonical_u64().cmp(&other.prefix.as_canonical_u64()) } } diff --git a/crates/miden-protocol/src/account/auth.rs b/crates/miden-protocol/src/account/auth.rs index d773ee106a..e4947095db 100644 --- a/crates/miden-protocol/src/account/auth.rs +++ b/crates/miden-protocol/src/account/auth.rs @@ -1,8 +1,11 @@ +use alloc::borrow::ToOwned; +use alloc::string::ToString; use alloc::vec::Vec; +use core::str::FromStr; use rand::{CryptoRng, Rng}; -use crate::crypto::dsa::{ecdsa_k256_keccak, falcon512_rpo}; +use crate::crypto::dsa::{ecdsa_k256_keccak, falcon512_poseidon2}; use crate::errors::AuthSchemeError; use crate::utils::serde::{ ByteReader, @@ -11,15 +14,18 @@ use crate::utils::serde::{ DeserializationError, Serializable, }; -use crate::{Felt, Hasher, Word}; +use crate::{Felt, Word}; // AUTH SCHEME // ================================================================================================ /// Identifier of 
signature schemes use for transaction authentication -const FALCON_512_RPO: u8 = 2; +const FALCON512_POSEIDON2: u8 = 2; const ECDSA_K256_KECCAK: u8 = 1; +const FALCON512_POSEIDON2_STR: &str = "Falcon512Poseidon2"; +const ECDSA_K256_KECCAK_STR: &str = "EcdsaK256Keccak"; + /// Defines standard authentication schemes (i.e., signature schemes) available in the Miden /// protocol. #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -28,10 +34,10 @@ const ECDSA_K256_KECCAK: u8 = 1; pub enum AuthScheme { /// A deterministic Falcon512 signature scheme. /// - /// This version differs from the reference Falcon512 implementation in its use of the RPO - /// algebraic hash function in its hash-to-point algorithm to make signatures very efficient - /// to verify inside Miden VM. - Falcon512Rpo = FALCON_512_RPO, + /// This version differs from the reference Falcon512 implementation in its use of the Poseidon2 + /// hash function in its hash-to-point algorithm to make signatures very efficient to verify + /// inside Miden VM. + Falcon512Poseidon2 = FALCON512_POSEIDON2, /// ECDSA signature scheme over secp256k1 curve using Keccak to hash the messages when signing.
EcdsaK256Keccak = ECDSA_K256_KECCAK, @@ -47,8 +53,8 @@ impl AuthScheme { impl core::fmt::Display for AuthScheme { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { match self { - Self::Falcon512Rpo => f.write_str("Falcon512Rpo"), - Self::EcdsaK256Keccak => f.write_str("EcdsaK256Keccak"), + Self::Falcon512Poseidon2 => f.write_str(FALCON512_POSEIDON2_STR), + Self::EcdsaK256Keccak => f.write_str(ECDSA_K256_KECCAK_STR), } } } @@ -58,9 +64,21 @@ impl TryFrom for AuthScheme { fn try_from(value: u8) -> Result { match value { - FALCON_512_RPO => Ok(Self::Falcon512Rpo), + FALCON512_POSEIDON2 => Ok(Self::Falcon512Poseidon2), ECDSA_K256_KECCAK => Ok(Self::EcdsaK256Keccak), - value => Err(AuthSchemeError::InvalidAuthSchemeIdentifier(value)), + value => Err(AuthSchemeError::InvalidAuthSchemeIdentifier(value.to_string())), + } + } +} + +impl FromStr for AuthScheme { + type Err = AuthSchemeError; + + fn from_str(input: &str) -> Result { + match input { + FALCON512_POSEIDON2_STR => Ok(AuthScheme::Falcon512Poseidon2), + ECDSA_K256_KECCAK_STR => Ok(AuthScheme::EcdsaK256Keccak), + other => Err(AuthSchemeError::InvalidAuthSchemeIdentifier(other.to_owned())), } } } @@ -79,7 +97,7 @@ impl Serializable for AuthScheme { impl Deserializable for AuthScheme { fn read_from(source: &mut R) -> Result { match source.read_u8()? 
{ - FALCON_512_RPO => Ok(Self::Falcon512Rpo), + FALCON512_POSEIDON2 => Ok(Self::Falcon512Poseidon2), ECDSA_K256_KECCAK => Ok(Self::EcdsaK256Keccak), value => Err(DeserializationError::InvalidValue(format!( "auth scheme identifier `{value}` is not valid" @@ -96,20 +114,20 @@ impl Deserializable for AuthScheme { #[non_exhaustive] #[repr(u8)] pub enum AuthSecretKey { - Falcon512Rpo(falcon512_rpo::SecretKey) = FALCON_512_RPO, + Falcon512Poseidon2(falcon512_poseidon2::SecretKey) = FALCON512_POSEIDON2, EcdsaK256Keccak(ecdsa_k256_keccak::SecretKey) = ECDSA_K256_KECCAK, } impl AuthSecretKey { - /// Generates an Falcon512Rpo secret key from the OS-provided randomness. + /// Generates a Falcon512Poseidon2 secret key from the OS-provided randomness. #[cfg(feature = "std")] - pub fn new_falcon512_rpo() -> Self { - Self::Falcon512Rpo(falcon512_rpo::SecretKey::new()) + pub fn new_falcon512_poseidon2() -> Self { + Self::Falcon512Poseidon2(falcon512_poseidon2::SecretKey::new()) } - /// Generates an Falcon512Rpo secrete key using the provided random number generator. - pub fn new_falcon512_rpo_with_rng(rng: &mut R) -> Self { - Self::Falcon512Rpo(falcon512_rpo::SecretKey::with_rng(rng)) + /// Generates a Falcon512Poseidon2 secret key using the provided random number generator. + pub fn new_falcon512_poseidon2_with_rng(rng: &mut R) -> Self { + Self::Falcon512Poseidon2(falcon512_poseidon2::SecretKey::with_rng(rng)) } /// Generates an EcdsaK256Keccak secret key from the OS-provided randomness.
@@ -132,7 +150,7 @@ impl AuthSecretKey { rng: &mut R, ) -> Result { match scheme { - AuthScheme::Falcon512Rpo => Ok(Self::new_falcon512_rpo_with_rng(rng)), + AuthScheme::Falcon512Poseidon2 => Ok(Self::new_falcon512_poseidon2_with_rng(rng)), AuthScheme::EcdsaK256Keccak => Ok(Self::new_ecdsa_k256_keccak_with_rng(rng)), } } @@ -144,7 +162,7 @@ impl AuthSecretKey { #[cfg(feature = "std")] pub fn with_scheme(scheme: AuthScheme) -> Result { match scheme { - AuthScheme::Falcon512Rpo => Ok(Self::new_falcon512_rpo()), + AuthScheme::Falcon512Poseidon2 => Ok(Self::new_falcon512_poseidon2()), AuthScheme::EcdsaK256Keccak => Ok(Self::new_ecdsa_k256_keccak()), } } @@ -152,7 +170,7 @@ impl AuthSecretKey { /// Returns the authentication scheme of this secret key. pub fn auth_scheme(&self) -> AuthScheme { match self { - AuthSecretKey::Falcon512Rpo(_) => AuthScheme::Falcon512Rpo, + AuthSecretKey::Falcon512Poseidon2(_) => AuthScheme::Falcon512Poseidon2, AuthSecretKey::EcdsaK256Keccak(_) => AuthScheme::EcdsaK256Keccak, } } @@ -160,7 +178,9 @@ impl AuthSecretKey { /// Returns a public key associated with this secret key. pub fn public_key(&self) -> PublicKey { match self { - AuthSecretKey::Falcon512Rpo(key) => PublicKey::Falcon512Rpo(key.public_key()), + AuthSecretKey::Falcon512Poseidon2(key) => { + PublicKey::Falcon512Poseidon2(key.public_key()) + }, AuthSecretKey::EcdsaK256Keccak(key) => PublicKey::EcdsaK256Keccak(key.public_key()), } } @@ -168,7 +188,9 @@ impl AuthSecretKey { /// Signs the provided message with this secret key. 
pub fn sign(&self, message: Word) -> Signature { match self { - AuthSecretKey::Falcon512Rpo(key) => Signature::Falcon512Rpo(key.sign(message)), + AuthSecretKey::Falcon512Poseidon2(key) => { + Signature::Falcon512Poseidon2(key.sign(message)) + }, AuthSecretKey::EcdsaK256Keccak(key) => Signature::EcdsaK256Keccak(key.sign(message)), } } @@ -178,7 +200,7 @@ impl Serializable for AuthSecretKey { fn write_into(&self, target: &mut W) { self.auth_scheme().write_into(target); match self { - AuthSecretKey::Falcon512Rpo(key) => key.write_into(target), + AuthSecretKey::Falcon512Poseidon2(key) => key.write_into(target), AuthSecretKey::EcdsaK256Keccak(key) => key.write_into(target), } } @@ -187,9 +209,9 @@ impl Serializable for AuthSecretKey { impl Deserializable for AuthSecretKey { fn read_from(source: &mut R) -> Result { match source.read::()? { - AuthScheme::Falcon512Rpo => { - let secret_key = falcon512_rpo::SecretKey::read_from(source)?; - Ok(AuthSecretKey::Falcon512Rpo(secret_key)) + AuthScheme::Falcon512Poseidon2 => { + let secret_key = falcon512_poseidon2::SecretKey::read_from(source)?; + Ok(AuthSecretKey::Falcon512Poseidon2(secret_key)) }, AuthScheme::EcdsaK256Keccak => { let secret_key = ecdsa_k256_keccak::SecretKey::read_from(source)?; @@ -212,8 +234,8 @@ impl core::fmt::Display for PublicKeyCommitment { } } -impl From for PublicKeyCommitment { - fn from(value: falcon512_rpo::PublicKey) -> Self { +impl From for PublicKeyCommitment { + fn from(value: falcon512_poseidon2::PublicKey) -> Self { Self(value.to_commitment()) } } @@ -234,7 +256,7 @@ impl From for PublicKeyCommitment { #[derive(Clone, Debug)] #[non_exhaustive] pub enum PublicKey { - Falcon512Rpo(falcon512_rpo::PublicKey), + Falcon512Poseidon2(falcon512_poseidon2::PublicKey), EcdsaK256Keccak(ecdsa_k256_keccak::PublicKey), } @@ -242,7 +264,7 @@ impl PublicKey { /// Returns the authentication scheme of this public key. 
pub fn auth_scheme(&self) -> AuthScheme { match self { - PublicKey::Falcon512Rpo(_) => AuthScheme::Falcon512Rpo, + PublicKey::Falcon512Poseidon2(_) => AuthScheme::Falcon512Poseidon2, PublicKey::EcdsaK256Keccak(_) => AuthScheme::EcdsaK256Keccak, } } @@ -250,7 +272,7 @@ impl PublicKey { /// Returns a commitment to this public key. pub fn to_commitment(&self) -> PublicKeyCommitment { match self { - PublicKey::Falcon512Rpo(key) => key.to_commitment().into(), + PublicKey::Falcon512Poseidon2(key) => key.to_commitment().into(), PublicKey::EcdsaK256Keccak(key) => key.to_commitment().into(), } } @@ -258,7 +280,7 @@ impl PublicKey { /// Verifies the provided signature against the provided message and this public key. pub fn verify(&self, message: Word, signature: Signature) -> bool { match (self, signature) { - (PublicKey::Falcon512Rpo(key), Signature::Falcon512Rpo(sig)) => { + (PublicKey::Falcon512Poseidon2(key), Signature::Falcon512Poseidon2(sig)) => { key.verify(message, &sig) }, (PublicKey::EcdsaK256Keccak(key), Signature::EcdsaK256Keccak(sig)) => { @@ -273,7 +295,7 @@ impl Serializable for PublicKey { fn write_into(&self, target: &mut W) { self.auth_scheme().write_into(target); match self { - PublicKey::Falcon512Rpo(pub_key) => pub_key.write_into(target), + PublicKey::Falcon512Poseidon2(pub_key) => pub_key.write_into(target), PublicKey::EcdsaK256Keccak(pub_key) => pub_key.write_into(target), } } @@ -282,9 +304,9 @@ impl Serializable for PublicKey { impl Deserializable for PublicKey { fn read_from(source: &mut R) -> Result { match source.read::()? 
{ - AuthScheme::Falcon512Rpo => { - let pub_key = falcon512_rpo::PublicKey::read_from(source)?; - Ok(PublicKey::Falcon512Rpo(pub_key)) + AuthScheme::Falcon512Poseidon2 => { + let pub_key = falcon512_poseidon2::PublicKey::read_from(source)?; + Ok(PublicKey::Falcon512Poseidon2(pub_key)) }, AuthScheme::EcdsaK256Keccak => { let pub_key = ecdsa_k256_keccak::PublicKey::read_from(source)?; @@ -304,7 +326,7 @@ impl Deserializable for PublicKey { /// provider. To prepare the signature, use the provided `to_prepared_signature` method: /// ```rust,no_run /// use miden_protocol::account::auth::Signature; -/// use miden_protocol::crypto::dsa::falcon512_rpo::SecretKey; +/// use miden_protocol::crypto::dsa::falcon512_poseidon2::SecretKey; /// use miden_protocol::{Felt, Word}; /// /// let secret_key = SecretKey::new(); @@ -315,7 +337,7 @@ impl Deserializable for PublicKey { #[derive(Clone, Debug)] #[repr(u8)] pub enum Signature { - Falcon512Rpo(falcon512_rpo::Signature) = FALCON_512_RPO, + Falcon512Poseidon2(falcon512_poseidon2::Signature) = FALCON512_POSEIDON2, EcdsaK256Keccak(ecdsa_k256_keccak::Signature) = ECDSA_K256_KECCAK, } @@ -323,7 +345,7 @@ impl Signature { /// Returns the authentication scheme of this signature. 
pub fn auth_scheme(&self) -> AuthScheme { match self { - Signature::Falcon512Rpo(_) => AuthScheme::Falcon512Rpo, + Signature::Falcon512Poseidon2(_) => AuthScheme::Falcon512Poseidon2, Signature::EcdsaK256Keccak(_) => AuthScheme::EcdsaK256Keccak, } } @@ -336,25 +358,22 @@ impl Signature { pub fn to_prepared_signature(&self, msg: Word) -> Vec { // TODO: the `expect()` should be changed to an error; but that will be a part of a bigger // refactoring - let mut result = match self { - Signature::Falcon512Rpo(sig) => prepare_falcon512_rpo_signature(sig), + match self { + Signature::Falcon512Poseidon2(sig) => { + miden_core_lib::dsa::falcon512_poseidon2::encode_signature(sig.public_key(), sig) + }, Signature::EcdsaK256Keccak(sig) => { let pk = ecdsa_k256_keccak::PublicKey::recover_from(msg, sig) .expect("inferring public key from signature and message should succeed"); miden_core_lib::dsa::ecdsa_k256_keccak::encode_signature(&pk, sig) }, - }; - - // reverse the signature data so that when it is pushed onto the advice stack, the first - // element of the vector is at the top of the stack - result.reverse(); - result + } } } -impl From for Signature { - fn from(signature: falcon512_rpo::Signature) -> Self { - Signature::Falcon512Rpo(signature) +impl From for Signature { + fn from(signature: falcon512_poseidon2::Signature) -> Self { + Signature::Falcon512Poseidon2(signature) } } @@ -362,7 +381,7 @@ impl Serializable for Signature { fn write_into(&self, target: &mut W) { self.auth_scheme().write_into(target); match self { - Signature::Falcon512Rpo(signature) => signature.write_into(target), + Signature::Falcon512Poseidon2(signature) => signature.write_into(target), Signature::EcdsaK256Keccak(signature) => signature.write_into(target), } } @@ -371,9 +390,9 @@ impl Serializable for Signature { impl Deserializable for Signature { fn read_from(source: &mut R) -> Result { match source.read::()? 
{ - AuthScheme::Falcon512Rpo => { - let signature = falcon512_rpo::Signature::read_from(source)?; - Ok(Signature::Falcon512Rpo(signature)) + AuthScheme::Falcon512Poseidon2 => { + let signature = falcon512_poseidon2::Signature::read_from(source)?; + Ok(Signature::Falcon512Poseidon2(signature)) }, AuthScheme::EcdsaK256Keccak => { let signature = ecdsa_k256_keccak::Signature::read_from(source)?; @@ -382,53 +401,3 @@ impl Deserializable for Signature { } } } - -// SIGNATURE PREPARATION -// ================================================================================================ - -/// Converts a Falcon [falcon512_rpo::Signature] to a vector of values to be pushed onto the -/// advice stack. The values are the ones required for a Falcon signature verification inside the VM -/// and they are: -/// -/// 1. The challenge point at which we evaluate the polynomials in the subsequent three bullet -/// points, i.e. `h`, `s2` and `pi`, to check the product relationship. -/// 2. The expanded public key represented as the coefficients of a polynomial `h` of degree < 512. -/// 3. The signature represented as the coefficients of a polynomial `s2` of degree < 512. -/// 4. The product of the above two polynomials `pi` in the ring of polynomials with coefficients in -/// the Miden field. -/// 5. The nonce represented as 8 field elements. -fn prepare_falcon512_rpo_signature(sig: &falcon512_rpo::Signature) -> Vec { - use falcon512_rpo::Polynomial; - - // The signature is composed of a nonce and a polynomial s2 - // The nonce is represented as 8 field elements. 
- let nonce = sig.nonce(); - // We convert the signature to a polynomial - let s2 = sig.sig_poly(); - // We also need in the VM the expanded key corresponding to the public key that was provided - // via the operand stack - let h = sig.public_key(); - // Lastly, for the probabilistic product routine that is part of the verification procedure, - // we need to compute the product of the expanded key and the signature polynomial in - // the ring of polynomials with coefficients in the Miden field. - let pi = Polynomial::mul_modulo_p(h, s2); - - // We now push the expanded key, the signature polynomial, and the product of the - // expanded key and the signature polynomial to the advice stack. We also push - // the challenge point at which the previous polynomials will be evaluated. - // Finally, we push the nonce needed for the hash-to-point algorithm. - - let mut polynomials: Vec = - h.coefficients.iter().map(|a| Felt::from(a.value() as u32)).collect(); - polynomials.extend(s2.coefficients.iter().map(|a| Felt::from(a.value() as u32))); - polynomials.extend(pi.iter().map(|a| Felt::new(*a))); - - let digest_polynomials = Hasher::hash_elements(&polynomials); - let challenge = (digest_polynomials[0], digest_polynomials[1]); - - let mut result: Vec = vec![challenge.0, challenge.1]; - result.extend_from_slice(&polynomials); - result.extend_from_slice(&nonce.to_elements()); - - result -} diff --git a/crates/miden-protocol/src/account/builder/mod.rs b/crates/miden-protocol/src/account/builder/mod.rs index ef708c8ae9..7a0e7bec6e 100644 --- a/crates/miden-protocol/src/account/builder/mod.rs +++ b/crates/miden-protocol/src/account/builder/mod.rs @@ -1,8 +1,6 @@ use alloc::boxed::Box; use alloc::vec::Vec; -use miden_core::FieldElement; - use crate::account::component::StorageSchema; use crate::account::{ Account, @@ -115,7 +113,7 @@ impl AccountBuilder { /// Adds a designated authentication [`AccountComponent`] to the builder. 
/// /// This component may contain multiple procedures, but is expected to contain exactly one - /// authentication procedure (named `auth_*`). + /// authentication procedure (marked with the `@auth_script` attribute). /// Calling this method multiple times will override the previous auth component. /// /// Procedures from this component will be placed at the beginning of the account procedure @@ -198,7 +196,8 @@ impl AccountBuilder { /// - Authentication component is missing. /// - Multiple authentication procedures are found. /// - The number of [`StorageSlot`](crate::account::StorageSlot)s of all components exceeds 255. - /// - [`MastForest::merge`](miden_processor::MastForest::merge) fails on the given components. + /// - [`MastForest::merge`](miden_processor::mast::MastForest::merge) fails on the given + /// components. /// - If duplicate assets were added to the builder (only under the `testing` feature). /// - If the vault is not empty on new accounts (only under the `testing` feature). 
pub fn build(mut self) -> Result { @@ -294,8 +293,7 @@ mod tests { use assert_matches::assert_matches; use miden_assembly::{Assembler, Library}; - use miden_core::FieldElement; - use miden_processor::MastNodeExt; + use miden_core::mast::MastNodeExt; use super::*; use crate::account::component::AccountComponentMetadata; @@ -346,7 +344,7 @@ mod tests { value[0] = Felt::new(custom.slot0); let metadata = - AccountComponentMetadata::new("test::custom_component1").with_supports_all_types(); + AccountComponentMetadata::new("test::custom_component1", AccountType::all()); AccountComponent::new( CUSTOM_LIBRARY1.clone(), vec![StorageSlot::with_value(CUSTOM_COMPONENT1_SLOT_NAME.clone(), value)], @@ -368,7 +366,7 @@ mod tests { value1[3] = Felt::new(custom.slot1); let metadata = - AccountComponentMetadata::new("test::custom_component2").with_supports_all_types(); + AccountComponentMetadata::new("test::custom_component2", AccountType::all()); AccountComponent::new( CUSTOM_LIBRARY2.clone(), vec![ diff --git a/crates/miden-protocol/src/account/code/header.rs b/crates/miden-protocol/src/account/code/header.rs deleted file mode 100644 index aae5b5a3ac..0000000000 --- a/crates/miden-protocol/src/account/code/header.rs +++ /dev/null @@ -1,82 +0,0 @@ -use alloc::vec::Vec; - -use miden_core::{ - Felt, - utils::{Deserializable, Serializable}, -}; -use miden_processor::Digest; - -use super::{AccountCode, build_procedure_commitment, procedures_as_elements}; -use crate::account::AccountProcedureInfo; - -/// A lightweight representation of account code that contains only procedure metadata without the -/// actual program instructions. -/// -/// Account code header consists of the following components: -/// - Code commitment, which uniquely identifies the account code. -/// - Procedure information, which contains metadata about each procedure in the account code, -/// including MAST roots, storage access permissions, and other relevant attributes. 
-/// -/// The header is used to provide verifiable information about account code structure and -/// storage access patterns without the need to include the full program instructions. -/// This is particularly useful for verification purposes and when the actual code execution -/// is not required. -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct AccountCodeHeader { - commitment: Digest, - procedures: Vec, -} - -impl AccountCodeHeader { - /// Returns a new instance of account code header with the specified procedures. - /// - /// The code commitment is computed during instantiation based on the provided procedures. - pub fn new(procedures: Vec) -> Self { - let commitment = build_procedure_commitment(&procedures); - AccountCodeHeader { procedures, commitment } - } - - /// Returns the commitment of this account code header. - pub fn commitment(&self) -> Digest { - self.commitment - } - - /// Returns a reference to the procedure information stored in this account code header. - pub fn procedures(&self) -> &[AccountProcedureInfo] { - &self.procedures - } - - /// Converts procedure information in this [AccountCodeHeader] into a vector of field elements. - /// - /// This is done by first converting each procedure into 8 field elements as follows: - /// ```text - /// [PROCEDURE_MAST_ROOT, storage_offset, storage_size, 0, 0] - /// ``` - /// And then concatenating the resulting elements into a single vector. 
- pub fn as_elements(&self) -> Vec { - procedures_as_elements(&self.procedures) - } -} - -impl From for AccountCodeHeader { - fn from(value: AccountCode) -> Self { - AccountCodeHeader::new(value.procedures) - } -} - -impl Serializable for AccountCodeHeader { - fn write_into(&self, target: &mut W) { - target.write(&self.procedures); - } -} - -impl Deserializable for AccountCodeHeader { - fn read_from( - source: &mut R, - ) -> Result { - let procedures: Vec = source.read()?; - let commitment = build_procedure_commitment(&procedures); - - Ok(AccountCodeHeader { procedures, commitment }) - } -} diff --git a/crates/miden-protocol/src/account/code/mod.rs b/crates/miden-protocol/src/account/code/mod.rs index dcb71e9425..74fec4a01c 100644 --- a/crates/miden-protocol/src/account/code/mod.rs +++ b/crates/miden-protocol/src/account/code/mod.rs @@ -283,7 +283,9 @@ impl Deserializable for AccountCode { fn read_from(source: &mut R) -> Result { let module = Arc::new(MastForest::read_from(source)?); let num_procedures = (source.read_u8()? as usize) + 1; - let procedures = source.read_many::(num_procedures)?; + let procedures = source + .read_many_iter(num_procedures)? 
+ .collect::, _>>()?; Ok(Self::from_parts(module, procedures)) } @@ -336,7 +338,7 @@ impl AccountProcedureBuilder { fn add_auth_component(&mut self, component: &AccountComponent) -> Result<(), AccountError> { let mut auth_proc_count = 0; - for (proc_root, is_auth) in component.get_procedures() { + for (proc_root, is_auth) in component.procedures() { self.add_procedure(proc_root); if is_auth { @@ -356,20 +358,19 @@ impl AccountProcedureBuilder { } fn add_component(&mut self, component: &AccountComponent) -> Result<(), AccountError> { - for (proc_mast_root, is_auth) in component.get_procedures() { + for (proc_root, is_auth) in component.procedures() { if is_auth { return Err(AccountError::AccountCodeMultipleAuthComponents); } - self.add_procedure(proc_mast_root); + self.add_procedure(proc_root); } Ok(()) } - fn add_procedure(&mut self, proc_mast_root: Word) { + fn add_procedure(&mut self, proc_root: AccountProcedureRoot) { // Allow procedures with the same MAST root from different components, but only add them // once. 
- let proc_root = AccountProcedureRoot::from_raw(proc_mast_root); if !self.procedures.contains(&proc_root) { self.procedures.push(proc_root); } @@ -446,7 +447,7 @@ mod tests { #[test] fn test_account_code_no_auth_component() { let library = Assembler::default().assemble_library([CODE]).unwrap(); - let metadata = AccountComponentMetadata::new("test::no_auth").with_supports_all_types(); + let metadata = AccountComponentMetadata::new("test::no_auth", AccountType::all()); let component = AccountComponent::new(library, vec![], metadata).unwrap(); let err = @@ -472,18 +473,19 @@ mod tests { use miden_assembly::Assembler; let code_with_multiple_auth = " + @auth_script pub proc auth_basic push.1 drop end + @auth_script pub proc auth_secondary push.0 drop end "; let library = Assembler::default().assemble_library([code_with_multiple_auth]).unwrap(); - let metadata = - AccountComponentMetadata::new("test::multiple_auth").with_supports_all_types(); + let metadata = AccountComponentMetadata::new("test::multiple_auth", AccountType::all()); let component = AccountComponent::new(library, vec![], metadata).unwrap(); let err = diff --git a/crates/miden-protocol/src/account/code/procedure.rs b/crates/miden-protocol/src/account/code/procedure.rs index fbef026007..a88fde5b0d 100644 --- a/crates/miden-protocol/src/account/code/procedure.rs +++ b/crates/miden-protocol/src/account/code/procedure.rs @@ -3,7 +3,7 @@ use alloc::sync::Arc; use miden_core::mast::MastForest; use miden_core::prettier::PrettyPrint; -use miden_processor::{MastNode, MastNodeExt, MastNodeId}; +use miden_processor::mast::{MastNode, MastNodeExt, MastNodeId}; use miden_protocol_macros::WordWrapper; use super::Felt; diff --git a/crates/miden-protocol/src/account/component/code.rs b/crates/miden-protocol/src/account/component/code.rs index d7c5113a7f..af4517b7d8 100644 --- a/crates/miden-protocol/src/account/component/code.rs +++ b/crates/miden-protocol/src/account/component/code.rs @@ -1,6 +1,8 @@ use 
miden_assembly::Library; -use miden_processor::MastForest; +use miden_assembly::library::ProcedureExport; +use miden_processor::mast::{MastForest, MastNodeExt}; +use crate::account::AccountProcedureRoot; use crate::vm::AdviceMap; // ACCOUNT COMPONENT CODE @@ -26,6 +28,22 @@ impl AccountComponentCode { self.0 } + /// Returns an iterator over the [`AccountProcedureRoot`]s of this component's exported + /// procedures. + pub fn procedure_roots(&self) -> impl Iterator + '_ { + self.0.exports().filter_map(|export| { + export.as_procedure().map(|proc_export| { + let digest = self.0.mast_forest()[proc_export.node].digest(); + AccountProcedureRoot::from_raw(digest) + }) + }) + } + + /// Returns the procedure exports of this component. + pub fn exports(&self) -> impl Iterator + '_ { + self.0.exports().filter_map(|export| export.as_procedure()) + } + /// Returns a new [AccountComponentCode] with the provided advice map entries merged into the /// underlying [Library]'s [MastForest]. /// diff --git a/crates/miden-protocol/src/account/component/metadata/mod.rs b/crates/miden-protocol/src/account/component/metadata/mod.rs index 2305b67c77..b02c007014 100644 --- a/crates/miden-protocol/src/account/component/metadata/mod.rs +++ b/crates/miden-protocol/src/account/component/metadata/mod.rs @@ -2,13 +2,18 @@ use alloc::collections::{BTreeMap, BTreeSet}; use alloc::string::{String, ToString}; use core::str::FromStr; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; use miden_mast_package::{Package, SectionId}; -use miden_processor::DeserializationError; use semver::Version; use super::{AccountType, SchemaRequirement, StorageSchema, StorageValueName}; use crate::errors::AccountError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // ACCOUNT COMPONENT METADATA // ================================================================================================ @@ -36,12 +41,11 @@ use 
crate::errors::AccountError; /// ``` /// use std::collections::BTreeMap; /// -/// use miden_protocol::account::StorageSlotName; /// use miden_protocol::account::component::{ /// AccountComponentMetadata, /// FeltSchema, /// InitStorageData, -/// SchemaTypeId, +/// SchemaType, /// StorageSchema, /// StorageSlotSchema, /// StorageValueName, @@ -49,6 +53,7 @@ use crate::errors::AccountError; /// WordSchema, /// WordValue, /// }; +/// use miden_protocol::account::{AccountType, StorageSlotName}; /// /// let slot_name = StorageSlotName::new("demo::test_value")?; /// @@ -64,7 +69,7 @@ use crate::errors::AccountError; /// StorageSlotSchema::Value(ValueSlotSchema::new(Some("demo slot".into()), word)), /// )])?; /// -/// let metadata = AccountComponentMetadata::new("test name") +/// let metadata = AccountComponentMetadata::new("test name", AccountType::all()) /// .with_description("description of the component") /// .with_storage_schema(storage_schema); /// @@ -100,21 +105,23 @@ pub struct AccountComponentMetadata { } impl AccountComponentMetadata { - /// Create a new [AccountComponentMetadata] with the given name. + /// Create a new [AccountComponentMetadata] with the given name and supported account types. /// /// Other fields are initialized to sensible defaults: /// - `description`: empty string /// - `version`: 1.0.0 - /// - `supported_types`: empty set /// - `storage_schema`: default (empty) /// /// Use the `with_*` mutator methods to customize these fields. - pub fn new(name: impl Into) -> Self { + pub fn new( + name: impl Into, + supported_types: impl IntoIterator, + ) -> Self { Self { name: name.into(), description: String::new(), version: Version::new(1, 0, 0), - supported_types: BTreeSet::new(), + supported_types: supported_types.into_iter().collect(), storage_schema: StorageSchema::default(), } } @@ -131,38 +138,6 @@ impl AccountComponentMetadata { self } - /// Adds a supported account type to the component. 
- pub fn with_supported_type(mut self, account_type: AccountType) -> Self { - self.supported_types.insert(account_type); - self - } - - /// Sets the supported account types of the component. - pub fn with_supported_types(mut self, supported_types: BTreeSet) -> Self { - self.supported_types = supported_types; - self - } - - /// Sets the component to support all account types. - pub fn with_supports_all_types(mut self) -> Self { - self.supported_types.extend([ - AccountType::FungibleFaucet, - AccountType::NonFungibleFaucet, - AccountType::RegularAccountImmutableCode, - AccountType::RegularAccountUpdatableCode, - ]); - self - } - - /// Sets the component to support regular account types (immutable and updatable code). - pub fn with_supports_regular_types(mut self) -> Self { - self.supported_types.extend([ - AccountType::RegularAccountImmutableCode, - AccountType::RegularAccountUpdatableCode, - ]); - self - } - /// Sets the storage schema of the component. pub fn with_storage_schema(mut self, schema: StorageSchema) -> Self { self.storage_schema = schema; diff --git a/crates/miden-protocol/src/account/component/mod.rs b/crates/miden-protocol/src/account/component/mod.rs index f8d72039f9..4b90cc39e1 100644 --- a/crates/miden-protocol/src/account/component/mod.rs +++ b/crates/miden-protocol/src/account/component/mod.rs @@ -2,6 +2,7 @@ use alloc::collections::BTreeSet; use alloc::vec::Vec; use miden_mast_package::{MastArtifact, Package}; +use miden_processor::mast::MastNodeExt; mod metadata; pub use metadata::*; @@ -12,11 +13,14 @@ pub use storage::*; mod code; pub use code::AccountComponentCode; -use crate::account::{AccountType, StorageSlot}; +use crate::account::{AccountProcedureRoot, AccountType, StorageSlot}; use crate::assembly::Path; use crate::errors::AccountError; use crate::{MastForest, Word}; +/// The attribute name used to mark the authentication procedure in an account component. 
+const AUTH_SCRIPT_ATTRIBUTE: &str = "auth_script"; + // ACCOUNT COMPONENT // ================================================================================================ @@ -192,16 +196,24 @@ impl AccountComponent { self.metadata.supported_types().contains(&account_type) } - /// Returns a vector of tuples (digest, is_auth) for all procedures in this component. - pub fn get_procedures(&self) -> Vec<(Word, bool)> { - let mut procedures = Vec::new(); - for module in self.code.as_library().module_infos() { - for (_, procedure_info) in module.procedures() { - let is_auth = procedure_info.name.starts_with("auth_"); - procedures.push((procedure_info.digest, is_auth)); - } - } - procedures + /// Returns an iterator over ([`AccountProcedureRoot`], is_auth) for all procedures in this + /// component. + /// + /// A procedure is considered an authentication procedure if it has the `@auth_script` + /// attribute. + pub fn procedures(&self) -> impl Iterator + '_ { + let library = self.code.as_library(); + library.exports().filter_map(|export| { + export.as_procedure().map(|proc_export| { + let digest = library + .mast_forest() + .get_node_by_id(proc_export.node) + .expect("export node not in the forest") + .digest(); + let is_auth = proc_export.attributes.has(AUTH_SCRIPT_ATTRIBUTE); + (AccountProcedureRoot::from_raw(digest), is_auth) + }) + }) } /// Returns the digest of the procedure with the specified path, or `None` if it was not found @@ -223,7 +235,6 @@ mod tests { use alloc::sync::Arc; use miden_assembly::Assembler; - use miden_core::utils::Serializable; use miden_mast_package::{ MastArtifact, Package, @@ -236,6 +247,7 @@ mod tests { use super::*; use crate::testing::account_code::CODE; + use crate::utils::serde::Serializable; #[test] fn test_extract_metadata_from_package() { @@ -243,10 +255,12 @@ mod tests { let library = Assembler::default().assemble_library([CODE]).unwrap(); // Test with metadata - let metadata = AccountComponentMetadata::new("test_component") - 
.with_description("A test component") - .with_version(Version::new(1, 0, 0)) - .with_supported_type(AccountType::RegularAccountImmutableCode); + let metadata = AccountComponentMetadata::new( + "test_component", + [AccountType::RegularAccountImmutableCode], + ) + .with_description("A test component") + .with_version(Version::new(1, 0, 0)); let metadata_bytes = metadata.to_bytes(); let package_with_metadata = Package { @@ -295,10 +309,9 @@ mod tests { let component_code = AccountComponentCode::from(library.clone()); // Create metadata for the component - let metadata = AccountComponentMetadata::new("test_component") + let metadata = AccountComponentMetadata::new("test_component", AccountType::regular()) .with_description("A test component") - .with_version(Version::new(1, 0, 0)) - .with_supports_regular_types(); + .with_version(Version::new(1, 0, 0)); // Test with empty init data - this tests the complete workflow: // Library + Metadata -> AccountComponent diff --git a/crates/miden-protocol/src/account/component/storage/init_storage_data.rs b/crates/miden-protocol/src/account/component/storage/init_storage_data.rs index 38494ca5af..6a0f560d55 100644 --- a/crates/miden-protocol/src/account/component/storage/init_storage_data.rs +++ b/crates/miden-protocol/src/account/component/storage/init_storage_data.rs @@ -8,7 +8,7 @@ use super::StorageValueName; use super::value_name::StorageValueNameError; use crate::account::StorageSlotName; use crate::errors::StorageSlotNameError; -use crate::{Felt, FieldElement, Word}; +use crate::{Felt, Word}; /// A word value provided via [`InitStorageData`]. 
/// diff --git a/crates/miden-protocol/src/account/component/storage/mod.rs b/crates/miden-protocol/src/account/component/storage/mod.rs index 06b24bd768..41745782bf 100644 --- a/crates/miden-protocol/src/account/component/storage/mod.rs +++ b/crates/miden-protocol/src/account/component/storage/mod.rs @@ -5,7 +5,7 @@ mod value_name; pub use value_name::{StorageValueName, StorageValueNameError}; mod type_registry; -pub use type_registry::{SchemaRequirement, SchemaTypeError, SchemaTypeId}; +pub use type_registry::{SchemaRequirement, SchemaType, SchemaTypeError}; mod init_storage_data; pub use init_storage_data::{InitStorageData, InitStorageDataError, WordValue}; diff --git a/crates/miden-protocol/src/account/component/storage/schema/felt.rs b/crates/miden-protocol/src/account/component/storage/schema/felt.rs index 25c3d62f87..1bf4a4ff9a 100644 --- a/crates/miden-protocol/src/account/component/storage/schema/felt.rs +++ b/crates/miden-protocol/src/account/component/storage/schema/felt.rs @@ -1,15 +1,19 @@ use alloc::collections::BTreeMap; use alloc::string::{String, ToString}; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; - -use super::super::type_registry::{SCHEMA_TYPE_REGISTRY, SchemaRequirement, SchemaTypeId}; +use super::super::type_registry::{SCHEMA_TYPE_REGISTRY, SchemaRequirement, SchemaType}; use super::super::{InitStorageData, StorageValueName, WordValue}; use super::validate_description_ascii; +use crate::Felt; use crate::account::StorageSlotName; use crate::errors::ComponentMetadataError; -use crate::{Felt, FieldElement}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // FELT SCHEMA // ================================================================================================ @@ -25,13 +29,13 @@ use crate::{Felt, FieldElement}; pub struct FeltSchema { name: Option, description: Option, - r#type: 
SchemaTypeId, + r#type: SchemaType, default_value: Option, } impl FeltSchema { /// Creates a new required typed felt field. - pub fn new_typed(r#type: SchemaTypeId, name: impl Into) -> Self { + pub fn new_typed(r#type: SchemaType, name: impl Into) -> Self { FeltSchema { name: Some(name.into()), description: None, @@ -42,7 +46,7 @@ impl FeltSchema { /// Creates a new typed felt field with a default value. pub fn new_typed_with_default( - r#type: SchemaTypeId, + r#type: SchemaType, name: impl Into, default_value: Felt, ) -> Self { @@ -59,34 +63,39 @@ impl FeltSchema { FeltSchema { name: None, description: None, - r#type: SchemaTypeId::void(), + r#type: SchemaType::void(), default_value: None, } } - /// Creates a new required felt field typed as [`SchemaTypeId::native_felt()`]. + /// Creates a new required felt field typed as [`SchemaType::native_felt()`]. pub fn felt(name: impl Into) -> Self { - Self::new_typed(SchemaTypeId::native_felt(), name) + Self::new_typed(SchemaType::native_felt(), name) } - /// Creates a new required felt field typed as [`SchemaTypeId::native_word()`]. + /// Creates a new required felt field typed as [`SchemaType::native_word()`]. pub fn word(name: impl Into) -> Self { - Self::new_typed(SchemaTypeId::native_word(), name) + Self::new_typed(SchemaType::native_word(), name) } - /// Creates a new required felt field typed as [`SchemaTypeId::u8()`]. + /// Creates a new required felt field typed as [`SchemaType::u8()`]. pub fn u8(name: impl Into) -> Self { - Self::new_typed(SchemaTypeId::u8(), name) + Self::new_typed(SchemaType::u8(), name) } - /// Creates a new required felt field typed as [`SchemaTypeId::u16()`]. + /// Creates a new required felt field typed as [`SchemaType::u16()`]. pub fn u16(name: impl Into) -> Self { - Self::new_typed(SchemaTypeId::u16(), name) + Self::new_typed(SchemaType::u16(), name) } - /// Creates a new required felt field typed as [`SchemaTypeId::u32()`]. 
+ /// Creates a new required felt field typed as [`SchemaType::u32()`]. pub fn u32(name: impl Into) -> Self { - Self::new_typed(SchemaTypeId::u32(), name) + Self::new_typed(SchemaType::u32(), name) + } + + /// Creates a new required felt field typed as [`SchemaType::bool()`]. + pub fn bool(name: impl Into) -> Self { + Self::new_typed(SchemaType::bool(), name) } /// Sets the default value of the [`FeltSchema`] and returns `self`. @@ -106,7 +115,7 @@ impl FeltSchema { } /// Returns the felt type. - pub fn felt_type(&self) -> SchemaTypeId { + pub fn felt_type(&self) -> SchemaType { self.r#type.clone() } @@ -127,7 +136,7 @@ impl FeltSchema { slot_prefix: StorageValueName, requirements: &mut BTreeMap, ) -> Result<(), ComponentMetadataError> { - if self.r#type == SchemaTypeId::void() { + if self.r#type == SchemaType::void() { return Ok(()); } @@ -203,7 +212,7 @@ impl FeltSchema { } } - if self.r#type == SchemaTypeId::void() { + if self.r#type == SchemaType::void() { return Ok(Felt::ZERO); } @@ -234,7 +243,7 @@ impl FeltSchema { )); } - if self.r#type == SchemaTypeId::void() { + if self.r#type == SchemaType::void() { if self.name.is_some() { return Err(ComponentMetadataError::InvalidSchema( "void felt elements must be unnamed".into(), @@ -285,7 +294,7 @@ impl Deserializable for FeltSchema { fn read_from(source: &mut R) -> Result { let name = Option::::read_from(source)?; let description = Option::::read_from(source)?; - let r#type = SchemaTypeId::read_from(source)?; + let r#type = SchemaType::read_from(source)?; let default_value = Option::::read_from(source)?; Ok(FeltSchema { name, description, r#type, default_value }) } diff --git a/crates/miden-protocol/src/account/component/storage/schema/map_slot.rs b/crates/miden-protocol/src/account/component/storage/schema/map_slot.rs index bcc2714374..065d5877ca 100644 --- a/crates/miden-protocol/src/account/component/storage/schema/map_slot.rs +++ b/crates/miden-protocol/src/account/component/storage/schema/map_slot.rs @@ -3,14 
+3,18 @@ use alloc::collections::BTreeMap; use alloc::string::String; use alloc::vec::Vec; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; - use super::super::{InitStorageData, StorageValueName}; use super::{WordSchema, parse_storage_value_with_schema, validate_description_ascii}; use crate::Word; -use crate::account::{StorageMap, StorageSlotName}; +use crate::account::{StorageMap, StorageMapKey, StorageSlotName}; use crate::errors::ComponentMetadataError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // MAP SLOT SCHEMA // ================================================================================================ @@ -86,8 +90,10 @@ impl MapSlotSchema { return Ok(StorageMap::new()); } - StorageMap::with_entries(entries) - .map_err(|err| ComponentMetadataError::StorageMapHasDuplicateKeys(Box::new(err))) + StorageMap::with_entries( + entries.into_iter().map(|(key, value)| (StorageMapKey::from_raw(key), value)), + ) + .map_err(|err| ComponentMetadataError::StorageMapHasDuplicateKeys(Box::new(err))) } pub fn key_schema(&self) -> &WordSchema { diff --git a/crates/miden-protocol/src/account/component/storage/schema/mod.rs b/crates/miden-protocol/src/account/component/storage/schema/mod.rs index 35f87d89a0..fd93070c9d 100644 --- a/crates/miden-protocol/src/account/component/storage/schema/mod.rs +++ b/crates/miden-protocol/src/account/component/storage/schema/mod.rs @@ -2,14 +2,18 @@ use alloc::collections::BTreeMap; use alloc::string::ToString; use alloc::vec::Vec; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; - use super::type_registry::SchemaRequirement; use super::{InitStorageData, StorageValueName}; use crate::account::{StorageSlot, StorageSlotName}; use crate::crypto::utils::bytes_to_elements_with_padding; use 
crate::errors::ComponentMetadataError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Hasher, Word}; mod felt; diff --git a/crates/miden-protocol/src/account/component/storage/schema/parse.rs b/crates/miden-protocol/src/account/component/storage/schema/parse.rs index 989baddc1e..c3e993c6bd 100644 --- a/crates/miden-protocol/src/account/component/storage/schema/parse.rs +++ b/crates/miden-protocol/src/account/component/storage/schema/parse.rs @@ -1,11 +1,11 @@ use alloc::string::String; use alloc::vec::Vec; -use super::super::type_registry::{SCHEMA_TYPE_REGISTRY, SchemaTypeId}; +use super::super::type_registry::{SCHEMA_TYPE_REGISTRY, SchemaType}; use super::super::{StorageValueName, WordValue}; use super::{FeltSchema, WordSchema}; use crate::errors::ComponentMetadataError; -use crate::{Felt, FieldElement, Word}; +use crate::{Felt, Word}; // HELPER FUNCTIONS // ================================================================================================ @@ -24,7 +24,7 @@ pub(crate) fn parse_storage_value_with_schema( parse_composite_elements(value, elements, slot_prefix)? }, (WordSchema::Composite { .. 
}, WordValue::Atomic(value)) => SCHEMA_TYPE_REGISTRY - .try_parse_word(&SchemaTypeId::native_word(), value) + .try_parse_word(&SchemaType::native_word(), value) .map_err(|err| { ComponentMetadataError::InvalidInitStorageValue( slot_prefix.clone(), @@ -38,7 +38,7 @@ pub(crate) fn parse_storage_value_with_schema( } fn parse_simple_word_value( - schema_type: &SchemaTypeId, + schema_type: &SchemaType, raw_value: &WordValue, slot_prefix: &StorageValueName, ) -> Result { @@ -55,7 +55,7 @@ fn parse_simple_word_value( let felts: Vec = elements .iter() .map(|element| { - SCHEMA_TYPE_REGISTRY.try_parse_felt(&SchemaTypeId::native_felt(), element) + SCHEMA_TYPE_REGISTRY.try_parse_felt(&SchemaType::native_felt(), element) }) .collect::>() .map_err(|err| { diff --git a/crates/miden-protocol/src/account/component/storage/schema/slot.rs b/crates/miden-protocol/src/account/component/storage/schema/slot.rs index da4c705235..6677b5eb05 100644 --- a/crates/miden-protocol/src/account/component/storage/schema/slot.rs +++ b/crates/miden-protocol/src/account/component/storage/schema/slot.rs @@ -1,14 +1,18 @@ use alloc::collections::BTreeMap; use alloc::string::String; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; - -use super::super::type_registry::{SchemaRequirement, SchemaTypeId}; +use super::super::type_registry::{SchemaRequirement, SchemaType}; use super::super::{InitStorageData, StorageValueName}; use super::{MapSlotSchema, ValueSlotSchema, WordSchema}; use crate::account::{StorageSlot, StorageSlotName}; use crate::errors::ComponentMetadataError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // STORAGE SLOT SCHEMA // ================================================================================================ @@ -25,7 +29,7 @@ pub enum StorageSlotSchema { impl StorageSlotSchema { /// Creates a value slot schema with the given 
description and word schema. /// - /// Accepts anything convertible to [`WordSchema`]: a [`SchemaTypeId`] for simple typed slots, + /// Accepts anything convertible to [`WordSchema`]: a [`SchemaType`] for simple typed slots, /// a `[FeltSchema; 4]` for composite slots, or a [`WordSchema`] directly. pub fn value(description: impl Into, word: impl Into) -> Self { Self::Value(ValueSlotSchema::new(Some(description.into()), word.into())) @@ -34,8 +38,8 @@ impl StorageSlotSchema { /// Creates a map slot schema with the given description and simple key/value types. pub fn map( description: impl Into, - key_type: SchemaTypeId, - value_type: SchemaTypeId, + key_type: SchemaType, + value_type: SchemaType, ) -> Self { Self::Map(MapSlotSchema::new( Some(description.into()), diff --git a/crates/miden-protocol/src/account/component/storage/schema/tests.rs b/crates/miden-protocol/src/account/component/storage/schema/tests.rs index 89c5b62527..99161c5994 100644 --- a/crates/miden-protocol/src/account/component/storage/schema/tests.rs +++ b/crates/miden-protocol/src/account/component/storage/schema/tests.rs @@ -1,13 +1,13 @@ use alloc::collections::BTreeMap; -use super::super::{InitStorageData, SchemaTypeId}; +use super::super::{InitStorageData, SchemaType}; use super::{FeltSchema, MapSlotSchema, ValueSlotSchema, WordSchema}; -use crate::account::{StorageMap, StorageSlotName}; +use crate::account::{StorageMap, StorageMapKey, StorageSlotName}; use crate::{Felt, Word}; #[test] fn map_slot_schema_default_values_returns_map() { - let word_schema = WordSchema::new_simple(SchemaTypeId::native_word()); + let word_schema = WordSchema::new_simple(SchemaType::native_word()); let mut default_values = BTreeMap::new(); default_values.insert( Word::from([Felt::new(1), Felt::new(0), Felt::new(0), Felt::new(0)]), @@ -35,7 +35,7 @@ fn value_slot_schema_exposes_felt_schema_types() { FeltSchema::u8("a"), FeltSchema::u16("b"), FeltSchema::u32("c"), - 
FeltSchema::new_typed(SchemaTypeId::new("felt").unwrap(), "d"), + FeltSchema::new_typed(SchemaType::new("felt").unwrap(), "d"), ]; let slot = ValueSlotSchema::new(None, WordSchema::new_value(felt_values)); @@ -43,15 +43,15 @@ fn value_slot_schema_exposes_felt_schema_types() { panic!("expected composite word schema"); }; - assert_eq!(value[0].felt_type(), SchemaTypeId::u8()); - assert_eq!(value[1].felt_type(), SchemaTypeId::u16()); - assert_eq!(value[2].felt_type(), SchemaTypeId::u32()); - assert_eq!(value[3].felt_type(), SchemaTypeId::new("felt").unwrap()); + assert_eq!(value[0].felt_type(), SchemaType::u8()); + assert_eq!(value[1].felt_type(), SchemaType::u16()); + assert_eq!(value[2].felt_type(), SchemaType::u32()); + assert_eq!(value[3].felt_type(), SchemaType::new("felt").unwrap()); } #[test] fn map_slot_schema_key_and_value_types() { - let key_schema = WordSchema::new_simple(SchemaTypeId::new("sampling::Key").unwrap()); + let key_schema = WordSchema::new_simple(SchemaType::new("sampling::Key").unwrap()); let value_schema = WordSchema::new_value([ FeltSchema::felt("a"), @@ -64,20 +64,20 @@ fn map_slot_schema_key_and_value_types() { assert_eq!( slot.key_schema(), - &WordSchema::new_simple(SchemaTypeId::new("sampling::Key").unwrap()) + &WordSchema::new_simple(SchemaType::new("sampling::Key").unwrap()) ); let WordSchema::Composite { value } = slot.value_schema() else { panic!("expected composite word schema for map values"); }; for felt in value.iter() { - assert_eq!(felt.felt_type(), SchemaTypeId::native_felt()); + assert_eq!(felt.felt_type(), SchemaType::native_felt()); } } #[test] fn value_slot_schema_accepts_typed_word_init_value() { - let slot = ValueSlotSchema::new(None, WordSchema::new_simple(SchemaTypeId::native_word())); + let slot = ValueSlotSchema::new(None, WordSchema::new_simple(SchemaType::native_word())); let slot_name: StorageSlotName = "demo::slot".parse().unwrap(); let mut init_data = InitStorageData::default(); @@ -90,7 +90,7 @@ fn 
value_slot_schema_accepts_typed_word_init_value() { #[test] fn value_slot_schema_accepts_felt_typed_word_init_value() { - let slot = ValueSlotSchema::new(None, WordSchema::new_simple(SchemaTypeId::u8())); + let slot = ValueSlotSchema::new(None, WordSchema::new_simple(SchemaType::u8())); let slot_name: StorageSlotName = "demo::u8_word".parse().unwrap(); let mut init_data = InitStorageData::default(); @@ -120,7 +120,7 @@ fn value_slot_schema_accepts_typed_felt_init_value_in_composed_word() { #[test] fn map_slot_schema_accepts_typed_map_init_value() { - let word_schema = WordSchema::new_simple(SchemaTypeId::native_word()); + let word_schema = WordSchema::new_simple(SchemaType::native_word()); let slot = MapSlotSchema::new(None, None, word_schema.clone(), word_schema); let slot_name: StorageSlotName = "demo::map".parse().unwrap(); @@ -131,7 +131,7 @@ fn map_slot_schema_accepts_typed_map_init_value() { let built = slot.try_build_map(&init_data, &slot_name).unwrap(); let expected = StorageMap::with_entries([( - Word::from([Felt::new(1), Felt::new(0), Felt::new(0), Felt::new(0)]), + StorageMapKey::from_array([1, 0, 0, 0]), Word::from([Felt::new(10), Felt::new(11), Felt::new(12), Felt::new(13)]), )]) .unwrap(); @@ -140,7 +140,7 @@ fn map_slot_schema_accepts_typed_map_init_value() { #[test] fn map_slot_schema_missing_init_value_defaults_to_empty_map() { - let word_schema = WordSchema::new_simple(SchemaTypeId::native_word()); + let word_schema = WordSchema::new_simple(SchemaType::native_word()); let slot = MapSlotSchema::new(None, None, word_schema.clone(), word_schema); let built = slot .try_build_map(&InitStorageData::default(), &"demo::map".parse().unwrap()) diff --git a/crates/miden-protocol/src/account/component/storage/schema/value_slot.rs b/crates/miden-protocol/src/account/component/storage/schema/value_slot.rs index bbd88be50b..c472a45778 100644 --- a/crates/miden-protocol/src/account/component/storage/schema/value_slot.rs +++ 
b/crates/miden-protocol/src/account/component/storage/schema/value_slot.rs @@ -1,15 +1,19 @@ use alloc::collections::BTreeMap; use alloc::string::String; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; - use super::super::type_registry::SchemaRequirement; use super::super::{InitStorageData, StorageValueName}; use super::{WordSchema, validate_description_ascii}; use crate::Word; use crate::account::StorageSlotName; use crate::errors::ComponentMetadataError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // VALUE SLOT SCHEMA // ================================================================================================ diff --git a/crates/miden-protocol/src/account/component/storage/schema/word.rs b/crates/miden-protocol/src/account/component/storage/schema/word.rs index 02af55e6aa..de609ebb25 100644 --- a/crates/miden-protocol/src/account/component/storage/schema/word.rs +++ b/crates/miden-protocol/src/account/component/storage/schema/word.rs @@ -1,15 +1,19 @@ use alloc::collections::BTreeMap; use alloc::string::{String, ToString}; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; - -use super::super::type_registry::{SCHEMA_TYPE_REGISTRY, SchemaRequirement, SchemaTypeId}; +use super::super::type_registry::{SCHEMA_TYPE_REGISTRY, SchemaRequirement, SchemaType}; use super::super::{InitStorageData, StorageValueName}; use super::FeltSchema; use crate::account::StorageSlotName; use crate::errors::ComponentMetadataError; -use crate::{Felt, FieldElement, Word}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Word}; // WORD SCHEMA // ================================================================================================ @@ -23,7 +27,7 @@ use crate::{Felt, 
FieldElement, Word}; pub enum WordSchema { /// A whole-word typed value supplied at instantiation time. Simple { - r#type: SchemaTypeId, + r#type: SchemaType, default_value: Option, }, /// A composed word that may mix defaults and typed fields. @@ -31,11 +35,11 @@ pub enum WordSchema { } impl WordSchema { - pub fn new_simple(r#type: SchemaTypeId) -> Self { + pub fn new_simple(r#type: SchemaType) -> Self { WordSchema::Simple { r#type, default_value: None } } - pub fn new_simple_with_default(r#type: SchemaTypeId, default_value: Word) -> Self { + pub fn new_simple_with_default(r#type: SchemaType, default_value: Word) -> Self { WordSchema::Simple { r#type, default_value: Some(default_value), @@ -53,11 +57,11 @@ impl WordSchema { } } - /// Returns the schema type identifier associated with whole-word init-supplied values. - pub fn word_type(&self) -> SchemaTypeId { + /// Returns the schema type associated with whole-word init-supplied values. + pub fn word_type(&self) -> SchemaType { match self { WordSchema::Simple { r#type, .. } => r#type.clone(), - WordSchema::Composite { .. } => SchemaTypeId::native_word(), + WordSchema::Composite { .. 
} => SchemaType::native_word(), } } @@ -69,7 +73,7 @@ impl WordSchema { ) -> Result<(), ComponentMetadataError> { match self { WordSchema::Simple { r#type, default_value } => { - if *r#type == SchemaTypeId::void() { + if *r#type == SchemaType::void() { return Ok(()); } @@ -165,7 +169,7 @@ impl WordSchema { super::parse_storage_value_with_schema(self, value, &slot_prefix) }, None => { - if *r#type == SchemaTypeId::void() { + if *r#type == SchemaType::void() { Ok(Word::empty()) } else { default_value.as_ref().copied().ok_or_else(|| { @@ -261,7 +265,7 @@ impl Deserializable for WordSchema { let tag = source.read_u8()?; match tag { 0 => { - let r#type = SchemaTypeId::read_from(source)?; + let r#type = SchemaType::read_from(source)?; let default_value = Option::::read_from(source)?; Ok(WordSchema::Simple { r#type, default_value }) }, @@ -276,8 +280,8 @@ impl Deserializable for WordSchema { } } -impl From for WordSchema { - fn from(r#type: SchemaTypeId) -> Self { +impl From for WordSchema { + fn from(r#type: SchemaType) -> Self { WordSchema::new_simple(r#type) } } @@ -290,6 +294,6 @@ impl From<[FeltSchema; 4]> for WordSchema { impl From<[Felt; 4]> for WordSchema { fn from(value: [Felt; 4]) -> Self { - WordSchema::new_simple_with_default(SchemaTypeId::native_word(), Word::from(value)) + WordSchema::new_simple_with_default(SchemaType::native_word(), Word::from(value)) } } diff --git a/crates/miden-protocol/src/account/component/storage/toml/mod.rs b/crates/miden-protocol/src/account/component/storage/toml/mod.rs index b26958aefb..a5850d1afb 100644 --- a/crates/miden-protocol/src/account/component/storage/toml/mod.rs +++ b/crates/miden-protocol/src/account/component/storage/toml/mod.rs @@ -18,7 +18,7 @@ use super::super::{ WordValue, }; use crate::account::component::storage::type_registry::SCHEMA_TYPE_REGISTRY; -use crate::account::component::{AccountComponentMetadata, SchemaTypeId}; +use crate::account::component::{AccountComponentMetadata, SchemaType}; use 
crate::account::{AccountType, StorageSlotName}; use crate::errors::ComponentMetadataError; @@ -69,10 +69,9 @@ impl AccountComponentMetadata { } let storage_schema = StorageSchema::new(fields)?; - Ok(Self::new(raw.name) + Ok(Self::new(raw.name, raw.supported_types) .with_description(raw.description) .with_version(raw.version) - .with_supported_types(raw.supported_types) .with_storage_schema(storage_schema)) } @@ -118,7 +117,7 @@ enum RawSlotType { #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(untagged)] enum RawWordType { - TypeIdentifier(SchemaTypeId), + TypeIdentifier(SchemaType), FeltSchemaArray(Vec), } @@ -463,7 +462,7 @@ impl RawStorageSlotSchema { impl WordValue { pub(super) fn try_parse_as_typed_word( &self, - schema_type: &SchemaTypeId, + schema_type: &SchemaType, slot_prefix: &StorageValueName, label: &str, ) -> Result { @@ -476,7 +475,7 @@ impl WordValue { let felts = elements .iter() .map(|element| { - SCHEMA_TYPE_REGISTRY.try_parse_felt(&SchemaTypeId::native_felt(), element) + SCHEMA_TYPE_REGISTRY.try_parse_felt(&SchemaType::native_felt(), element) }) .collect::, _>>() .map_err(ComponentMetadataError::StorageValueParsingError)?; @@ -493,7 +492,7 @@ impl WordValue { Ok(word) } - pub(super) fn from_word(schema_type: &SchemaTypeId, word: Word) -> Self { + pub(super) fn from_word(schema_type: &SchemaType, word: Word) -> Self { WordValue::Atomic(SCHEMA_TYPE_REGISTRY.display_word(schema_type, word).value().to_string()) } } diff --git a/crates/miden-protocol/src/account/component/storage/toml/serde_impls.rs b/crates/miden-protocol/src/account/component/storage/toml/serde_impls.rs index 765195c72e..b25eb91538 100644 --- a/crates/miden-protocol/src/account/component/storage/toml/serde_impls.rs +++ b/crates/miden-protocol/src/account/component/storage/toml/serde_impls.rs @@ -5,7 +5,7 @@ use serde::ser::{Error as SerError, SerializeStruct}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use 
super::super::type_registry::SCHEMA_TYPE_REGISTRY; -use super::super::{FeltSchema, SchemaTypeId, WordValue}; +use super::super::{FeltSchema, SchemaType, WordValue}; // FELT SCHEMA SERIALIZATION // ================================================================================================ @@ -15,9 +15,9 @@ impl Serialize for FeltSchema { where S: Serializer, { - if self.felt_type() == SchemaTypeId::void() { + if self.felt_type() == SchemaType::void() { let mut state = serializer.serialize_struct("FeltSchema", 2)?; - state.serialize_field("type", &SchemaTypeId::void())?; + state.serialize_field("type", &SchemaType::void())?; if let Some(description) = self.description() { state.serialize_field("description", description)?; } @@ -33,7 +33,7 @@ impl Serialize for FeltSchema { if let Some(description) = self.description() { state.serialize_field("description", description)?; } - if self.felt_type() != SchemaTypeId::native_felt() { + if self.felt_type() != SchemaType::native_felt() { state.serialize_field("type", &self.felt_type())?; } if let Some(default_value) = self.default_value() { @@ -61,12 +61,12 @@ impl<'de> Deserialize<'de> for FeltSchema { #[serde(default, rename = "default-value")] default_value: Option, #[serde(default, rename = "type")] - r#type: Option, + r#type: Option, } let raw = RawFeltSchema::deserialize(deserializer)?; - let felt_type = raw.r#type.unwrap_or_else(SchemaTypeId::native_felt); + let felt_type = raw.r#type.unwrap_or_else(SchemaType::native_felt); let description = raw.description.and_then(|description| { if description.trim().is_empty() { @@ -76,7 +76,7 @@ impl<'de> Deserialize<'de> for FeltSchema { } }); - if felt_type == SchemaTypeId::void() { + if felt_type == SchemaType::void() { if raw.name.is_some() { return Err(D::Error::custom("`type = \"void\"` elements must omit `name`")); } diff --git a/crates/miden-protocol/src/account/component/storage/toml/tests.rs b/crates/miden-protocol/src/account/component/storage/toml/tests.rs index 
8ead885df5..221c18c0a6 100644 --- a/crates/miden-protocol/src/account/component/storage/toml/tests.rs +++ b/crates/miden-protocol/src/account/component/storage/toml/tests.rs @@ -1,7 +1,6 @@ use alloc::string::ToString; use core::error::Error; -use miden_air::FieldElement; use miden_core::{Felt, Word}; use crate::account::component::toml::init_storage_data::InitStorageDataError; @@ -9,14 +8,14 @@ use crate::account::component::{ AccountComponentMetadata, InitStorageData, InitStorageDataError as CoreInitStorageDataError, - SchemaTypeId, + SchemaType, StorageSlotSchema, StorageValueName, StorageValueNameError, WordSchema, WordValue, }; -use crate::account::{StorageSlotContent, StorageSlotName}; +use crate::account::{StorageMapKey, StorageSlotContent, StorageSlotName}; use crate::asset::TokenSymbol; use crate::errors::ComponentMetadataError; @@ -498,7 +497,7 @@ fn metadata_toml_round_trip_composed_slot_with_typed_fields() { .remove(&"demo::composed.a".parse::().unwrap()) .unwrap() .r#type, - SchemaTypeId::u16() + SchemaType::u16() ); let round_trip_toml = original.to_toml().expect("serialize to toml"); @@ -538,7 +537,7 @@ fn metadata_toml_round_trip_typed_slots() { _ => panic!("expected value slot"), }; - let typed_value = SchemaTypeId::native_word(); + let typed_value = SchemaType::native_word(); assert_eq!(value_slot.word(), &WordSchema::new_simple(typed_value.clone())); let map_slot = schema @@ -550,7 +549,7 @@ fn metadata_toml_round_trip_typed_slots() { _ => panic!("expected map slot"), }; - let pub_key_type = SchemaTypeId::new("miden::standards::auth::pub_key").unwrap(); + let pub_key_type = SchemaType::new("miden::standards::auth::pub_key").unwrap(); assert_eq!(map_slot.key_schema(), &WordSchema::new_simple(pub_key_type.clone())); assert_eq!(map_slot.value_schema(), &WordSchema::new_simple(pub_key_type)); @@ -676,8 +675,8 @@ fn extensive_schema_metadata_and_init_toml_example() { else { panic!("expected map slot schema"); }; - assert_eq!(default_map.key_schema(), 
&WordSchema::new_simple(SchemaTypeId::native_word())); - assert_eq!(default_map.value_schema(), &WordSchema::new_simple(SchemaTypeId::native_word())); + assert_eq!(default_map.key_schema(), &WordSchema::new_simple(SchemaType::native_word())); + assert_eq!(default_map.value_schema(), &WordSchema::new_simple(SchemaType::native_word())); // `type.key`/`type.value` parse as schema/type descriptors (not literal words). let typed_map_new_name = StorageSlotName::new("demo::typed_map_new").unwrap(); @@ -686,7 +685,7 @@ fn extensive_schema_metadata_and_init_toml_example() { else { panic!("expected map slot schema"); }; - assert_eq!(typed_map_new.value_schema(), &WordSchema::new_simple(SchemaTypeId::u16())); + assert_eq!(typed_map_new.value_schema(), &WordSchema::new_simple(SchemaType::u16())); assert!(matches!(typed_map_new.key_schema(), WordSchema::Composite { .. })); // used storage slots @@ -708,7 +707,7 @@ fn extensive_schema_metadata_and_init_toml_example() { .expect("symbol should be reported with a default value"); assert_eq!( symbol_requirement.r#type, - SchemaTypeId::new("miden::standards::fungible_faucets::metadata::token_symbol").unwrap() + SchemaType::new("miden::standards::fungible_faucets::metadata::token_symbol").unwrap() ); assert_eq!(symbol_requirement.default_value.as_deref(), Some("TST")); assert!( @@ -779,9 +778,12 @@ fn extensive_schema_metadata_and_init_toml_example() { panic!("expected map slot for static_map"); }; assert_eq!(static_map.num_entries(), 2); - assert_eq!(static_map.get(&Word::parse("0x1").unwrap()), Word::parse("0x10").unwrap()); assert_eq!( - static_map.get(&Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(2)])), + static_map.get(&StorageMapKey::from_raw(Word::parse("0x1").unwrap())), + Word::parse("0x10").unwrap() + ); + assert_eq!( + static_map.get(&StorageMapKey::from_array([0, 0, 0, 2])), Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(32)]) ); @@ -827,9 +829,8 @@ fn 
extensive_schema_metadata_and_init_toml_example() { }; assert_eq!(typed_map_new_contents.num_entries(), 2); - let key1 = Word::from([Felt::new(1), Felt::new(2), Felt::ZERO, Felt::ZERO]); assert_eq!( - typed_map_new_contents.get(&key1), + typed_map_new_contents.get(&StorageMapKey::from_array([1, 2, 0, 0])), Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(16)]) ); @@ -854,12 +855,18 @@ fn extensive_schema_metadata_and_init_toml_example() { panic!("expected map slot for static_map"); }; assert_eq!(static_map.num_entries(), 3); - assert_eq!(static_map.get(&Word::parse("0x1").unwrap()), Word::parse("0x99").unwrap()); assert_eq!( - static_map.get(&Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(2)])), + static_map.get(&StorageMapKey::from_raw(Word::parse("0x1").unwrap())), + Word::parse("0x99").unwrap() + ); + assert_eq!( + static_map.get(&StorageMapKey::from_array([0, 0, 0, 2])), Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(32)]) ); - assert_eq!(static_map.get(&Word::parse("0x3").unwrap()), Word::parse("0x30").unwrap()); + assert_eq!( + static_map.get(&StorageMapKey::from_raw(Word::parse("0x3").unwrap())), + Word::parse("0x30").unwrap() + ); } #[test] @@ -935,5 +942,5 @@ fn typed_map_supports_non_numeric_value_types() { let key = Word::parse("0x1").unwrap(); let symbol_felt: Felt = TokenSymbol::new("BTC").unwrap().into(); let expected_value = Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, symbol_felt]); - assert_eq!(map.get(&key), expected_value); + assert_eq!(map.get(&StorageMapKey::from_raw(key)), expected_value); } diff --git a/crates/miden-protocol/src/account/component/storage/type_registry.rs b/crates/miden-protocol/src/account/component/storage/type_registry.rs index 396d12eb08..6db8c9716c 100644 --- a/crates/miden-protocol/src/account/component/storage/type_registry.rs +++ b/crates/miden-protocol/src/account/component/storage/type_registry.rs @@ -3,14 +3,20 @@ use alloc::collections::BTreeMap; use alloc::string::{String, 
ToString}; use core::error::Error; use core::fmt::{self, Display}; +use core::str::FromStr; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_core::{Felt, FieldElement, Word}; -use miden_processor::DeserializationError; +use miden_core::{Felt, Word}; use thiserror::Error; use crate::account::auth::{AuthScheme, PublicKey}; use crate::asset::TokenSymbol; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::utils::sync::LazyLock; /// A global registry for schema type converters. @@ -23,6 +29,7 @@ pub static SCHEMA_TYPE_REGISTRY: LazyLock = LazyLock::new(|| registry.register_felt_type::(); registry.register_felt_type::(); registry.register_felt_type::(); + registry.register_felt_type::(); registry.register_felt_type::(); registry.register_felt_type::(); registry.register_felt_type::(); @@ -43,24 +50,24 @@ pub enum SchemaTypeError { #[error("conversion error: {0}")] ConversionError(String), #[error("felt type ` {0}` not found in the type registry")] - FeltTypeNotFound(SchemaTypeId), + FeltTypeNotFound(SchemaType), #[error("invalid type name `{0}`: {1}")] InvalidTypeName(String, String), #[error("failed to parse input `{input}` as `{schema_type}`")] ParseError { input: String, - schema_type: SchemaTypeId, + schema_type: SchemaType, source: Box, }, #[error("word type ` {0}` not found in the type registry")] - WordTypeNotFound(SchemaTypeId), + WordTypeNotFound(SchemaType), } impl SchemaTypeError { /// Creates a [`SchemaTypeError::ParseError`]. pub fn parse( input: impl Into, - schema_type: SchemaTypeId, + schema_type: SchemaType, source: impl Error + Send + Sync + 'static, ) -> Self { SchemaTypeError::ParseError { @@ -74,9 +81,9 @@ impl SchemaTypeError { // SCHEMA TYPE // ================================================================================================ -/// A newtype wrapper around a `String`, representing a schema type identifier. 
+/// A newtype wrapper around a `String`, representing a schema type. /// -/// A valid schema identifier is a name in the style of Rust namespaces, composed of one or more +/// A valid schema type is a name in the style of Rust namespaces, composed of one or more /// non-empty segments separated by `::`. Each segment can contain only ASCII alphanumerics or `_`. /// /// Some examples: @@ -87,10 +94,10 @@ impl SchemaTypeError { #[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd)] #[cfg_attr(feature = "std", derive(::serde::Deserialize, ::serde::Serialize))] #[cfg_attr(feature = "std", serde(transparent))] -pub struct SchemaTypeId(String); +pub struct SchemaType(String); -impl SchemaTypeId { - /// Creates a new [`SchemaTypeId`] from a `String`. +impl SchemaType { + /// Creates a new [`SchemaType`] from a `String`. /// /// The name must follow a Rust-style namespace format, consisting of one or more segments /// (non-empty, and alphanumerical) separated by double-colon (`::`) delimiters. @@ -105,14 +112,14 @@ impl SchemaTypeId { if s.is_empty() { return Err(SchemaTypeError::InvalidTypeName( s.clone(), - "schema type identifier is empty".to_string(), + "schema type is empty".to_string(), )); } for segment in s.split("::") { if segment.is_empty() { return Err(SchemaTypeError::InvalidTypeName( s.clone(), - "empty segment in schema type identifier".to_string(), + "empty segment in schema type".to_string(), )); } if !segment.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') { @@ -125,51 +132,56 @@ impl SchemaTypeId { Ok(Self(s)) } - /// Returns the schema type identifier for the `void` type. + /// Returns the schema type for the `void` type. /// /// The `void` type always parses to `0` and is intended to model reserved or padding felts. 
- pub fn void() -> SchemaTypeId { - SchemaTypeId::new("void").expect("type is well formed") + pub fn void() -> SchemaType { + SchemaType::new("void").expect("type is well formed") } - /// Returns the schema type identifier for the native [`Felt`] type. - pub fn native_felt() -> SchemaTypeId { - SchemaTypeId::new("felt").expect("type is well formed") + /// Returns the schema type for the native [`Felt`] type. + pub fn native_felt() -> SchemaType { + SchemaType::new("felt").expect("type is well formed") } - /// Returns the schema type identifier for the native [`Word`] type. - pub fn native_word() -> SchemaTypeId { - SchemaTypeId::new("word").expect("type is well formed") + /// Returns the schema type for the native [`Word`] type. + pub fn native_word() -> SchemaType { + SchemaType::new("word").expect("type is well formed") } - /// Returns the schema type identifier for the native `u8` type. - pub fn u8() -> SchemaTypeId { - SchemaTypeId::new("u8").expect("type is well formed") + /// Returns the schema type for the native `u8` type. + pub fn u8() -> SchemaType { + SchemaType::new("u8").expect("type is well formed") } - /// Returns the schema type identifier for the native `u16` type. - pub fn u16() -> SchemaTypeId { - SchemaTypeId::new("u16").expect("type is well formed") + /// Returns the schema type for the native `u16` type. + pub fn u16() -> SchemaType { + SchemaType::new("u16").expect("type is well formed") } - /// Returns the schema type identifier for the native `u32` type. - pub fn u32() -> SchemaTypeId { - SchemaTypeId::new("u32").expect("type is well formed") + /// Returns the schema type for the native `u32` type. + pub fn u32() -> SchemaType { + SchemaType::new("u32").expect("type is well formed") } - /// Returns the schema type identifier for auth scheme identifiers. - pub fn auth_scheme() -> SchemaTypeId { - SchemaTypeId::new("miden::standards::auth::scheme").expect("type is well formed") + /// Returns the schema type for the native `bool` type. 
+ pub fn bool() -> SchemaType { + SchemaType::new("bool").expect("type is well formed") } - /// Returns the schema type identifier for public key commitments. - pub fn pub_key() -> SchemaTypeId { - SchemaTypeId::new("miden::standards::auth::pub_key").expect("type is well formed") + /// Returns the schema type for auth scheme identifiers. + pub fn auth_scheme() -> SchemaType { + SchemaType::new("miden::standards::auth::scheme").expect("type is well formed") } - /// Returns the schema type identifier for fungible faucet token symbols. - pub fn token_symbol() -> SchemaTypeId { - SchemaTypeId::new("miden::standards::fungible_faucets::metadata::token_symbol") + /// Returns the schema type for public key commitments. + pub fn pub_key() -> SchemaType { + SchemaType::new("miden::standards::auth::pub_key").expect("type is well formed") + } + + /// Returns the schema type for fungible faucet token symbols. + pub fn token_symbol() -> SchemaType { + SchemaType::new("miden::standards::fungible_faucets::metadata::token_symbol") .expect("type is well formed") } @@ -179,23 +191,23 @@ impl SchemaTypeId { } } -impl Display for SchemaTypeId { +impl Display for SchemaType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(self.as_str()) } } -impl Serializable for SchemaTypeId { +impl Serializable for SchemaType { fn write_into(&self, target: &mut W) { target.write(self.0.clone()) } } -impl Deserializable for SchemaTypeId { +impl Deserializable for SchemaType { fn read_from(source: &mut R) -> Result { let id: String = source.read()?; - SchemaTypeId::new(id).map_err(|err| DeserializationError::InvalidValue(err.to_string())) + SchemaType::new(id).map_err(|err| DeserializationError::InvalidValue(err.to_string())) } } @@ -212,7 +224,7 @@ impl Deserializable for SchemaTypeId { #[derive(Debug, Clone, PartialEq, Eq)] pub struct SchemaRequirement { /// The expected type identifier. 
- pub r#type: SchemaTypeId, + pub r#type: SchemaType, /// An optional description providing additional context. pub description: Option, /// An optional default value, which can be overridden at component instantiation time. @@ -225,7 +237,7 @@ pub struct SchemaRequirement { /// Trait for converting a string into a single `Felt`. pub trait FeltType: Send + Sync { /// Returns the type identifier. - fn type_name() -> SchemaTypeId + fn type_name() -> SchemaType where Self: Sized; @@ -243,7 +255,7 @@ pub trait FeltType: Send + Sync { /// Trait for converting a string into a single `Word`. pub trait WordType: Send + Sync { /// Returns the type identifier. - fn type_name() -> SchemaTypeId + fn type_name() -> SchemaType where Self: Sized; @@ -262,7 +274,7 @@ impl WordType for T where T: FeltType, { - fn type_name() -> SchemaTypeId { + fn type_name() -> SchemaType { ::type_name() } @@ -285,12 +297,41 @@ where // FELT IMPLS FOR NATIVE TYPES // ================================================================================================ +/// A boolean felt type: `0` (false) or `1` (true). +struct Bool; + +impl FeltType for Bool { + fn type_name() -> SchemaType { + SchemaType::bool() + } + + fn parse_str(input: &str) -> Result { + match input { + "true" | "1" => Ok(Felt::new(1)), + "false" | "0" => Ok(Felt::new(0)), + _ => Err(SchemaTypeError::ConversionError(format!( + "invalid bool value `{input}`: expected `true`, `false`, `1`, or `0`" + ))), + } + } + + fn display_felt(value: Felt) -> Result { + match value.as_canonical_u64() { + 0 => Ok("false".into()), + 1 => Ok("true".into()), + other => Err(SchemaTypeError::ConversionError(format!( + "value `{other}` is not a valid bool (expected 0 or 1)" + ))), + } + } +} + /// A felt type that represents irrelevant elements in a storage schema definition. 
struct Void; impl FeltType for Void { - fn type_name() -> SchemaTypeId { - SchemaTypeId::void() + fn type_name() -> SchemaType { + SchemaType::void() } fn parse_str(input: &str) -> Result { @@ -310,8 +351,8 @@ impl FeltType for Void { } impl FeltType for u8 { - fn type_name() -> SchemaTypeId { - SchemaTypeId::u8() + fn type_name() -> SchemaType { + SchemaType::u8() } fn parse_str(input: &str) -> Result { @@ -322,7 +363,7 @@ impl FeltType for u8 { } fn display_felt(value: Felt) -> Result { - let native = u8::try_from(value.as_int()).map_err(|_| { + let native = u8::try_from(value.as_canonical_u64()).map_err(|_| { SchemaTypeError::ConversionError(format!("value `{}` is out of range for u8", value)) })?; Ok(native.to_string()) @@ -330,8 +371,8 @@ impl FeltType for u8 { } impl FeltType for AuthScheme { - fn type_name() -> SchemaTypeId { - SchemaTypeId::auth_scheme() + fn type_name() -> SchemaType { + SchemaType::auth_scheme() } fn parse_str(input: &str) -> Result { @@ -340,23 +381,15 @@ impl FeltType for AuthScheme { SchemaTypeError::parse(input.to_string(), ::type_name(), err) })? } else { - match input { - "Falcon512Rpo" => AuthScheme::Falcon512Rpo, - "EcdsaK256Keccak" => AuthScheme::EcdsaK256Keccak, - _ => { - return Err(SchemaTypeError::ConversionError(format!( - "invalid auth scheme `{input}`: expected one of `Falcon512Rpo`, \ - `EcdsaK256Keccak`, `1`, `2`" - ))); - }, - } + AuthScheme::from_str(input) + .map_err(|err| SchemaTypeError::ConversionError(err.to_string()))? 
}; Ok(Felt::from(auth_scheme.as_u8())) } fn display_felt(value: Felt) -> Result { - let scheme_id = u8::try_from(value.as_int()).map_err(|_| { + let scheme_id = u8::try_from(value.as_canonical_u64()).map_err(|_| { SchemaTypeError::ConversionError(format!( "value `{}` is out of range for auth scheme id", value @@ -372,8 +405,8 @@ impl FeltType for AuthScheme { } impl FeltType for u16 { - fn type_name() -> SchemaTypeId { - SchemaTypeId::u16() + fn type_name() -> SchemaType { + SchemaType::u16() } fn parse_str(input: &str) -> Result { @@ -384,7 +417,7 @@ impl FeltType for u16 { } fn display_felt(value: Felt) -> Result { - let native = u16::try_from(value.as_int()).map_err(|_| { + let native = u16::try_from(value.as_canonical_u64()).map_err(|_| { SchemaTypeError::ConversionError(format!("value `{}` is out of range for u16", value)) })?; Ok(native.to_string()) @@ -392,8 +425,8 @@ impl FeltType for u16 { } impl FeltType for u32 { - fn type_name() -> SchemaTypeId { - SchemaTypeId::u32() + fn type_name() -> SchemaType { + SchemaType::u32() } fn parse_str(input: &str) -> Result { @@ -404,7 +437,7 @@ impl FeltType for u32 { } fn display_felt(value: Felt) -> Result { - let native = u32::try_from(value.as_int()).map_err(|_| { + let native = u32::try_from(value.as_canonical_u64()).map_err(|_| { SchemaTypeError::ConversionError(format!("value `{}` is out of range for u32", value)) })?; Ok(native.to_string()) @@ -412,8 +445,8 @@ impl FeltType for u32 { } impl FeltType for Felt { - fn type_name() -> SchemaTypeId { - SchemaTypeId::native_felt() + fn type_name() -> SchemaType { + SchemaType::native_felt() } fn parse_str(input: &str) -> Result { @@ -429,13 +462,13 @@ impl FeltType for Felt { } fn display_felt(value: Felt) -> Result { - Ok(format!("0x{:x}", value.as_int())) + Ok(format!("0x{:x}", value.as_canonical_u64())) } } impl FeltType for TokenSymbol { - fn type_name() -> SchemaTypeId { - SchemaTypeId::token_symbol() + fn type_name() -> SchemaType { + SchemaType::token_symbol() 
} fn parse_str(input: &str) -> Result { let token = TokenSymbol::new(input).map_err(|err| { @@ -448,15 +481,10 @@ impl FeltType for TokenSymbol { let token = TokenSymbol::try_from(value).map_err(|err| { SchemaTypeError::ConversionError(format!( "invalid token_symbol value `{}`: {err}", - value.as_int() + value.as_canonical_u64() )) })?; - token.to_string().map_err(|err| { - SchemaTypeError::ConversionError(format!( - "failed to display token_symbol value `{}`: {err}", - value.as_int() - )) - }) + Ok(token.to_string()) } } @@ -483,8 +511,8 @@ fn pad_hex_string(input: &str) -> String { } impl WordType for Word { - fn type_name() -> SchemaTypeId { - SchemaTypeId::native_word() + fn type_name() -> SchemaType { + SchemaType::native_word() } fn parse_str(input: &str) -> Result { Word::parse(input).map_err(|err| { @@ -502,8 +530,8 @@ impl WordType for Word { } impl WordType for PublicKey { - fn type_name() -> SchemaTypeId { - SchemaTypeId::pub_key() + fn type_name() -> SchemaType { + SchemaType::pub_key() } fn parse_str(input: &str) -> Result { let padded_input = pad_hex_string(input); @@ -571,10 +599,10 @@ enum TypeKind { /// corresponding storage values. #[derive(Clone, Debug, Default)] pub struct SchemaTypeRegistry { - felt: BTreeMap, - word: BTreeMap, - felt_display: BTreeMap, - word_display: BTreeMap, + felt: BTreeMap, + word: BTreeMap, + felt_display: BTreeMap, + word_display: BTreeMap, } impl SchemaTypeRegistry { @@ -613,7 +641,7 @@ impl SchemaTypeRegistry { /// - If the type is not registered or if the conversion fails. pub fn try_parse_felt( &self, - type_name: &SchemaTypeId, + type_name: &SchemaType, value: &str, ) -> Result { let converter = self @@ -626,7 +654,7 @@ impl SchemaTypeRegistry { /// Validates that the given [`Felt`] conforms to the specified schema type. 
pub fn validate_felt_value( &self, - type_name: &SchemaTypeId, + type_name: &SchemaType, felt: Felt, ) -> Result<(), SchemaTypeError> { let display = self @@ -642,7 +670,7 @@ impl SchemaTypeRegistry { /// Validates that the given [`Word`] conforms to the specified schema type. pub fn validate_word_value( &self, - type_name: &SchemaTypeId, + type_name: &SchemaType, word: Word, ) -> Result<(), SchemaTypeError> { match self.type_kind(type_name) { @@ -663,15 +691,15 @@ impl SchemaTypeRegistry { /// /// This is intended for serializing schemas to TOML (e.g. default values). #[allow(dead_code)] - pub fn display_felt(&self, type_name: &SchemaTypeId, felt: Felt) -> String { + pub fn display_felt(&self, type_name: &SchemaType, felt: Felt) -> String { self.felt_display .get(type_name) .and_then(|display| display(felt).ok()) - .unwrap_or_else(|| format!("0x{:x}", felt.as_int())) + .unwrap_or_else(|| format!("0x{:x}", felt.as_canonical_u64())) } /// Converts a [`Word`] into a canonical string representation and reports how it was produced. - pub fn display_word(&self, type_name: &SchemaTypeId, word: Word) -> WordDisplay { + pub fn display_word(&self, type_name: &SchemaType, word: Word) -> WordDisplay { if let Some(display) = self.word_display.get(type_name) { let value = display(word).unwrap_or_else(|_| word.to_string()); return WordDisplay::Word(value); @@ -699,7 +727,7 @@ impl SchemaTypeRegistry { /// - If the type is not registered or if the conversion fails. pub fn try_parse_word( &self, - type_name: &SchemaTypeId, + type_name: &SchemaType, value: &str, ) -> Result { if let Some(converter) = self.word.get(type_name) { @@ -716,11 +744,11 @@ impl SchemaTypeRegistry { } /// Returns `true` if a `FeltType` is registered for the given type. 
- pub fn contains_felt_type(&self, type_name: &SchemaTypeId) -> bool { + pub fn contains_felt_type(&self, type_name: &SchemaType) -> bool { self.felt.contains_key(type_name) } - fn type_kind(&self, type_name: &SchemaTypeId) -> TypeKind { + fn type_kind(&self, type_name: &SchemaType) -> TypeKind { if self.contains_felt_type(type_name) { TypeKind::Felt } else { @@ -732,7 +760,7 @@ impl SchemaTypeRegistry { /// /// This also returns `true` for any registered felt type (as those can be embedded into a word /// with zero-padding). - pub fn contains_word_type(&self, type_name: &SchemaTypeId) -> bool { + pub fn contains_word_type(&self, type_name: &SchemaType) -> bool { self.word.contains_key(type_name) || self.felt.contains_key(type_name) } } @@ -743,7 +771,7 @@ mod tests { #[test] fn auth_scheme_type_supports_named_and_numeric_values() { - let auth_scheme_type = SchemaTypeId::auth_scheme(); + let auth_scheme_type = SchemaType::auth_scheme(); let numeric_word = SCHEMA_TYPE_REGISTRY .try_parse_word(&auth_scheme_type, "2") @@ -757,14 +785,15 @@ mod tests { let displayed = SCHEMA_TYPE_REGISTRY.display_word(&auth_scheme_type, numeric_word); assert!( - matches!(displayed, WordDisplay::Felt(ref value) if value == "Falcon512Rpo"), + matches!(displayed, WordDisplay::Felt(ref value) if value == "Falcon512Poseidon2"), "expected canonical auth scheme display, got {displayed:?}" ); } #[test] - fn auth_scheme_type_rejects_invalid_values() { - let auth_scheme_type = SchemaTypeId::auth_scheme(); + fn schema_types_reject_invalid_values() { + // Auth scheme rejects out-of-range and unknown values. + let auth_scheme_type = SchemaType::auth_scheme(); assert!(SCHEMA_TYPE_REGISTRY.try_parse_word(&auth_scheme_type, "9").is_err()); assert!(SCHEMA_TYPE_REGISTRY.try_parse_word(&auth_scheme_type, "invalid").is_err()); @@ -775,5 +804,19 @@ mod tests { .validate_word_value(&auth_scheme_type, invalid_word) .is_err() ); + + // Bool type parses "true"/"false"/"1"/"0" and rejects everything else. 
+ let bool_type = SchemaType::bool(); + + assert_eq!(SCHEMA_TYPE_REGISTRY.try_parse_felt(&bool_type, "true").unwrap(), Felt::new(1)); + assert_eq!(SCHEMA_TYPE_REGISTRY.try_parse_felt(&bool_type, "false").unwrap(), Felt::new(0)); + assert_eq!(SCHEMA_TYPE_REGISTRY.try_parse_felt(&bool_type, "1").unwrap(), Felt::new(1)); + assert_eq!(SCHEMA_TYPE_REGISTRY.try_parse_felt(&bool_type, "0").unwrap(), Felt::new(0)); + assert_eq!(SCHEMA_TYPE_REGISTRY.display_felt(&bool_type, Felt::new(0)), "false"); + assert_eq!(SCHEMA_TYPE_REGISTRY.display_felt(&bool_type, Felt::new(1)), "true"); + + assert!(SCHEMA_TYPE_REGISTRY.try_parse_felt(&bool_type, "yes").is_err()); + assert!(SCHEMA_TYPE_REGISTRY.try_parse_felt(&bool_type, "2").is_err()); + assert!(SCHEMA_TYPE_REGISTRY.validate_felt_value(&bool_type, Felt::new(2)).is_err()); } } diff --git a/crates/miden-protocol/src/account/component/storage/value_name.rs b/crates/miden-protocol/src/account/component/storage/value_name.rs index b150dc362b..46568ebef6 100644 --- a/crates/miden-protocol/src/account/component/storage/value_name.rs +++ b/crates/miden-protocol/src/account/component/storage/value_name.rs @@ -3,12 +3,17 @@ use core::cmp::Ordering; use core::fmt::{self, Display}; use core::str::FromStr; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; use thiserror::Error; use crate::account::StorageSlotName; use crate::errors::StorageSlotNameError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A simple wrapper type around a string key that identifies init-provided values. 
/// diff --git a/crates/miden-protocol/src/account/delta/mod.rs b/crates/miden-protocol/src/account/delta/mod.rs index 1b8d13d1f5..5fdddc577b 100644 --- a/crates/miden-protocol/src/account/delta/mod.rs +++ b/crates/miden-protocol/src/account/delta/mod.rs @@ -12,7 +12,13 @@ use crate::account::{ use crate::asset::AssetVault; use crate::crypto::SequentialCommit; use crate::errors::{AccountDeltaError, AccountError}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Felt, Word, ZERO}; mod storage; @@ -96,7 +102,7 @@ impl AccountDelta { pub fn merge(&mut self, other: Self) -> Result<(), AccountDeltaError> { let new_nonce_delta = self.nonce_delta + other.nonce_delta; - if new_nonce_delta.as_int() < self.nonce_delta.as_int() { + if new_nonce_delta.as_canonical_u64() < self.nonce_delta.as_canonical_u64() { return Err(AccountDeltaError::NonceIncrementOverflow { current: self.nonce_delta, increment: other.nonce_delta, @@ -190,22 +196,25 @@ impl AccountDelta { /// [`LexicographicWord`](crate::LexicographicWord). The WORD layout is in memory-order. /// /// - Append `[[nonce_delta, 0, account_id_suffix, account_id_prefix], EMPTY_WORD]`, where - /// account_id_{prefix,suffix} are the prefix and suffix felts of the native account id and - /// nonce_delta is the value by which the nonce was incremented. + /// `account_id_{prefix,suffix}` are the prefix and suffix felts of the native account id and + /// `nonce_delta` is the value by which the nonce was incremented. /// - Fungible Asset Delta /// - For each **updated** fungible asset, sorted by its vault key, whose amount delta is /// **non-zero**: - /// - Append `[domain = 1, was_added, 0, 0]`. 
- /// - Append `[amount, 0, faucet_id_suffix, faucet_id_prefix]` where amount is the delta by - /// which the fungible asset's amount has changed and was_added is a boolean flag - /// indicating whether the amount was added (1) or subtracted (0). + /// - Append `[domain = 1, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix]` + /// where `faucet_id_suffix_and_metadata` is the faucet ID suffix with asset metadata + /// (including the callbacks flag) encoded in the lower 8 bits. + /// - Append `[amount_delta, 0, 0, 0]` where `amount_delta` is the delta by which the + /// fungible asset's amount has changed and `was_added` is a boolean flag indicating + /// whether the amount was added (1) or subtracted (0). /// - Non-Fungible Asset Delta /// - For each **updated** non-fungible asset, sorted by its vault key: - /// - Append `[domain = 1, was_added, 0, 0]` where was_added is a boolean flag indicating - /// whether the asset was added (1) or removed (0). Note that the domain is the same for - /// assets since `faucet_id_prefix` is at the same position in the layout for both assets, - /// and, by design, it is never the same for fungible and non-fungible assets. - /// - Append `[hash0, hash1, hash2, faucet_id_prefix]`, i.e. the non-fungible asset. + /// - Append `[domain = 1, was_added, faucet_id_suffix, faucet_id_prefix]` where `was_added` + /// is a boolean flag indicating whether the asset was added (1) or removed (0). Note that + /// the domain is the same for assets since `faucet_id_suffix` and `faucet_id_prefix` are + /// at the same position in the layout for both assets, and, by design, they are never the + /// same for fungible and non-fungible assets. + /// - Append `[hash0, hash1, hash2, hash3]`, i.e. the non-fungible asset. /// - Storage Slots are sorted by slot ID and are iterated in this order. 
For each slot **whose /// value has changed**, depending on the slot type: /// - Value Slot @@ -269,7 +278,7 @@ impl AccountDelta { /// [ /// ID_AND_NONCE, EMPTY_WORD, /// [/* no fungible asset delta */], - /// [[domain = 1, was_added = 0, 0, 0], NON_FUNGIBLE_ASSET], + /// [[domain = 1, was_added = 0, faucet_id_suffix, faucet_id_prefix], NON_FUNGIBLE_ASSET], /// [/* no storage delta */] /// ] /// ``` @@ -279,14 +288,15 @@ impl AccountDelta { /// ID_AND_NONCE, EMPTY_WORD, /// [/* no fungible asset delta */], /// [/* no non-fungible asset delta */], - /// [[domain = 2, 0, slot_id_suffix = 0, slot_id_prefix = 0], NEW_VALUE] + /// [[domain = 2, 0, slot_id_suffix = faucet_id_suffix, slot_id_prefix = faucet_id_prefix], NEW_VALUE] /// ] /// ``` /// - /// `NEW_VALUE` is user-controllable so it can be crafted to match `NON_FUNGIBLE_ASSET`. The - /// domain separator is then the only value that differentiates these two deltas. This shows the - /// importance of placing the domain separators in the same index within each word's layout - /// which makes it easy to see that this value cannot be crafted to be the same. + /// `NEW_VALUE` is user-controllable so it can be crafted to match `NON_FUNGIBLE_ASSET`. Users + /// would have to choose a slot ID (at account creation time) that is equal to the faucet ID. + /// The domain separator is then the only value that differentiates these two deltas. This shows + /// the importance of placing the domain separators in the same index within each word's layout + /// to ensure users cannot craft an ambiguous delta. 
/// /// ### Number of Changed Entries /// @@ -587,8 +597,7 @@ fn validate_nonce( mod tests { use assert_matches::assert_matches; - use miden_core::utils::Serializable; - use miden_core::{Felt, FieldElement}; + use miden_core::Felt; use super::{AccountDelta, AccountStorageDelta, AccountVaultDelta}; use crate::account::delta::AccountUpdateDetails; @@ -600,6 +609,7 @@ mod tests { AccountStorageMode, AccountType, StorageMapDelta, + StorageMapKey, StorageSlotName, }; use crate::asset::{ @@ -615,6 +625,7 @@ mod tests { ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE, AccountIdBuilder, }; + use crate::utils::serde::Serializable; use crate::{ONE, Word, ZERO}; #[test] @@ -684,8 +695,11 @@ mod tests { [( StorageSlotName::mock(4), StorageMapDelta::from_iters( - [Word::from([1, 1, 1, 0u32]), Word::from([0, 1, 1, 1u32])], - [(Word::from([1, 1, 1, 1u32]), Word::from([1, 1, 1, 1u32]))], + [ + StorageMapKey::from_array([1, 1, 1, 0]), + StorageMapKey::from_array([0, 1, 1, 1]), + ], + [(StorageMapKey::from_array([1, 1, 1, 1]), Word::from([1, 1, 1, 1u32]))], ), )], ); @@ -695,8 +709,7 @@ mod tests { AccountIdBuilder::new() .account_type(AccountType::NonFungibleFaucet) .storage_mode(AccountStorageMode::Public) - .build_with_rng(&mut rand::rng()) - .prefix(), + .build_with_rng(&mut rand::rng()), vec![6], ) .unwrap(), @@ -734,13 +747,8 @@ mod tests { let account_code = AccountCode::mock(); assert_eq!(account_code.to_bytes().len(), account_code.get_size_hint()); - let account = Account::new_existing( - account_id, - asset_vault, - account_storage, - account_code, - Felt::ONE, - ); + let account = + Account::new_existing(account_id, asset_vault, account_storage, account_code, ONE); assert_eq!(account.to_bytes().len(), account.get_size_hint()); // AccountUpdateDetails diff --git a/crates/miden-protocol/src/account/delta/storage.rs b/crates/miden-protocol/src/account/delta/storage.rs index bd600ac0e0..dd8249c046 100644 --- a/crates/miden-protocol/src/account/delta/storage.rs +++ 
b/crates/miden-protocol/src/account/delta/storage.rs @@ -11,7 +11,13 @@ use super::{ Serializable, Word, }; -use crate::account::{StorageMap, StorageSlotContent, StorageSlotName, StorageSlotType}; +use crate::account::{ + StorageMap, + StorageMapKey, + StorageSlotContent, + StorageSlotName, + StorageSlotType, +}; use crate::{EMPTY_WORD, Felt, LexicographicWord, ZERO}; // ACCOUNT STORAGE DELTA @@ -103,7 +109,7 @@ impl AccountStorageDelta { pub fn set_map_item( &mut self, slot_name: StorageSlotName, - key: Word, + key: StorageMapKey, new_value: Word, ) -> Result<(), AccountDeltaError> { match self @@ -194,9 +200,10 @@ impl AccountStorageDelta { elements.extend_from_slice(value.as_elements()); } - let num_changed_entries = Felt::try_from(map_delta.num_entries()).expect( - "number of changed entries should not exceed max representable felt", - ); + let num_changed_entries = Felt::try_from(map_delta.num_entries() as u64) + .expect( + "number of changed entries should not exceed max representable felt", + ); elements.extend_from_slice(&[ DOMAIN_MAP, @@ -288,12 +295,10 @@ impl Deserializable for AccountStorageDelta { } let num_maps = source.read_u8()? as usize; - deltas.extend( - source - .read_many::<(StorageSlotName, StorageMapDelta)>(num_maps)? - .into_iter() - .map(|(slot_name, map_delta)| (slot_name, StorageSlotDelta::Map(map_delta))), - ); + for read_result in source.read_many_iter::<(StorageSlotName, StorageMapDelta)>(num_maps)? { + let (slot_name, map_delta) = read_result?; + deltas.insert(slot_name, StorageSlotDelta::Map(map_delta)); + } Ok(Self::from_raw(deltas)) } @@ -465,11 +470,11 @@ impl Deserializable for StorageSlotDelta { /// The [`LexicographicWord`] wrapper is necessary to order the keys in the same way as the /// in-kernel account delta which uses a link map. 
#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub struct StorageMapDelta(BTreeMap); +pub struct StorageMapDelta(BTreeMap, Word>); impl StorageMapDelta { /// Creates a new storage map delta from the provided leaves. - pub fn new(map: BTreeMap) -> Self { + pub fn new(map: BTreeMap, Word>) -> Self { Self(map) } @@ -480,14 +485,14 @@ impl StorageMapDelta { /// Returns a reference to the updated entries in this storage map delta. /// - /// Note that the returned key is the raw map key. - pub fn entries(&self) -> &BTreeMap { + /// Note that the returned key is the [`StorageMapKey`]. + pub fn entries(&self) -> &BTreeMap, Word> { &self.0 } /// Inserts an item into the storage map delta. - pub fn insert(&mut self, raw_key: Word, value: Word) { - self.0.insert(LexicographicWord::new(raw_key), value); + pub fn insert(&mut self, key: StorageMapKey, value: Word) { + self.0.insert(LexicographicWord::new(key), value); } /// Returns true if storage map delta contains no updates. @@ -502,17 +507,17 @@ impl StorageMapDelta { } /// Returns a mutable reference to the underlying map. - pub fn as_map_mut(&mut self) -> &mut BTreeMap { + pub fn as_map_mut(&mut self) -> &mut BTreeMap, Word> { &mut self.0 } /// Returns an iterator of all the cleared keys in the storage map. - fn cleared_keys(&self) -> impl Iterator + '_ { + fn cleared_keys(&self) -> impl Iterator + '_ { self.0.iter().filter(|&(_, value)| value.is_empty()).map(|(key, _)| key.inner()) } /// Returns an iterator of all the updated entries in the storage map. - fn updated_entries(&self) -> impl Iterator + '_ { + fn updated_entries(&self) -> impl Iterator + '_ { self.0.iter().filter_map(|(key, value)| { if !value.is_empty() { Some((key.inner(), value)) @@ -527,8 +532,8 @@ impl StorageMapDelta { impl StorageMapDelta { /// Creates a new [StorageMapDelta] from the provided iterators. 
pub fn from_iters( - cleared_leaves: impl IntoIterator, - updated_leaves: impl IntoIterator, + cleared_leaves: impl IntoIterator, + updated_leaves: impl IntoIterator, ) -> Self { Self(BTreeMap::from_iter( cleared_leaves @@ -543,7 +548,7 @@ impl StorageMapDelta { } /// Consumes self and returns the underlying map. - pub fn into_map(self) -> BTreeMap { + pub fn into_map(self) -> BTreeMap, Word> { self.0 } } @@ -562,8 +567,8 @@ impl From for StorageMapDelta { impl Serializable for StorageMapDelta { fn write_into(&self, target: &mut W) { - let cleared: Vec<&Word> = self.cleared_keys().collect(); - let updated: Vec<(&Word, &Word)> = self.updated_entries().collect(); + let cleared: Vec<&StorageMapKey> = self.cleared_keys().collect(); + let updated: Vec<(&StorageMapKey, &Word)> = self.updated_entries().collect(); target.write_usize(cleared.len()); target.write_many(cleared.iter()); @@ -573,18 +578,16 @@ impl Serializable for StorageMapDelta { } fn get_size_hint(&self) -> usize { - let word_size = EMPTY_WORD.get_size_hint(); - let cleared_keys_count = self.cleared_keys().count(); let updated_entries_count = self.updated_entries().count(); // Cleared Keys cleared_keys_count.get_size_hint() + - cleared_keys_count * Word::SERIALIZED_SIZE + + cleared_keys_count * StorageMapKey::SERIALIZED_SIZE + // Updated Entries updated_entries_count.get_size_hint() + - updated_entries_count * (Word::SERIALIZED_SIZE + word_size) + updated_entries_count * (StorageMapKey::SERIALIZED_SIZE + Word::SERIALIZED_SIZE) } } @@ -617,7 +620,7 @@ mod tests { use assert_matches::assert_matches; use super::{AccountStorageDelta, Deserializable, Serializable}; - use crate::account::{StorageMapDelta, StorageSlotDelta, StorageSlotName}; + use crate::account::{StorageMapDelta, StorageMapKey, StorageSlotDelta, StorageSlotName}; use crate::errors::AccountDeltaError; use crate::{ONE, Word}; @@ -633,7 +636,7 @@ mod tests { ); let err = delta - .set_map_item(value_slot_name.clone(), Word::empty(), Word::empty()) + 
.set_map_item(value_slot_name.clone(), StorageMapKey::empty(), Word::empty()) .unwrap_err(); assert_matches!(err, AccountDeltaError::StorageSlotUsedAsDifferentTypes(slot_name) => { assert_eq!(value_slot_name, slot_name) @@ -674,11 +677,13 @@ mod tests { let serialized = storage_delta.to_bytes(); let deserialized = AccountStorageDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_delta); + assert_eq!(storage_delta.get_size_hint(), serialized.len()); let storage_delta = AccountStorageDelta::from_iters([StorageSlotName::mock(1)], [], []); let serialized = storage_delta.to_bytes(); let deserialized = AccountStorageDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_delta); + assert_eq!(storage_delta.get_size_hint(), serialized.len()); let storage_delta = AccountStorageDelta::from_iters( [], @@ -688,6 +693,7 @@ mod tests { let serialized = storage_delta.to_bytes(); let deserialized = AccountStorageDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_delta); + assert_eq!(storage_delta.get_size_hint(), serialized.len()); let storage_delta = AccountStorageDelta::from_iters( [], @@ -697,6 +703,7 @@ mod tests { let serialized = storage_delta.to_bytes(); let deserialized = AccountStorageDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_delta); + assert_eq!(storage_delta.get_size_hint(), serialized.len()); } #[test] @@ -706,13 +713,16 @@ mod tests { let deserialized = StorageMapDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_map_delta); - let storage_map_delta = StorageMapDelta::from_iters([Word::from([ONE, ONE, ONE, ONE])], []); + let storage_map_delta = + StorageMapDelta::from_iters([StorageMapKey::from_array([1, 1, 1, 1])], []); let serialized = storage_map_delta.to_bytes(); let deserialized = StorageMapDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_map_delta); - let storage_map_delta = - 
StorageMapDelta::from_iters([], [(Word::empty(), Word::from([ONE, ONE, ONE, ONE]))]); + let storage_map_delta = StorageMapDelta::from_iters( + [], + [(StorageMapKey::empty(), Word::from([ONE, ONE, ONE, ONE]))], + ); let serialized = storage_map_delta.to_bytes(); let deserialized = StorageMapDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_map_delta); @@ -739,8 +749,8 @@ mod tests { assert_eq!(deserialized, slot_delta); let map_delta = StorageMapDelta::from_iters( - [Word::from([1, 2, 3, 4u32])], - [(Word::from([5, 6, 7, 8u32]), Word::from([3, 4, 5, 6u32]))], + [StorageMapKey::from_array([1, 2, 3, 4])], + [(StorageMapKey::from_array([5, 6, 7, 8]), Word::from([3, 4, 5, 6u32]))], ); let slot_delta = StorageSlotDelta::Map(map_delta); let serialized = slot_delta.to_bytes(); @@ -786,7 +796,7 @@ mod tests { #[test] fn merge_maps(#[case] x: Option, #[case] y: Option, #[case] expected: Option) { fn create_delta(value: Option) -> StorageMapDelta { - let key = Word::from([10u32, 0, 0, 0]); + let key = StorageMapKey::from_array([10, 0, 0, 0]); match value { Some(value) => { StorageMapDelta::from_iters([], [(key, Word::from([value, 0, 0, 0]))]) diff --git a/crates/miden-protocol/src/account/delta/vault.rs b/crates/miden-protocol/src/account/delta/vault.rs index 959b813764..6b88fdb1d7 100644 --- a/crates/miden-protocol/src/account/delta/vault.rs +++ b/crates/miden-protocol/src/account/delta/vault.rs @@ -11,9 +11,9 @@ use super::{ DeserializationError, Serializable, }; -use crate::account::{AccountId, AccountType}; -use crate::asset::{Asset, FungibleAsset, NonFungibleAsset}; -use crate::{Felt, LexicographicWord, ONE, Word, ZERO}; +use crate::account::AccountType; +use crate::asset::{Asset, AssetVaultKey, FungibleAsset, NonFungibleAsset}; +use crate::{Felt, ONE, ZERO}; // ACCOUNT VAULT DELTA // ================================================================================================ @@ -100,8 +100,6 @@ impl AccountVaultDelta { added_assets: 
impl IntoIterator, removed_assets: impl IntoIterator, ) -> Self { - use crate::asset::Asset; - let mut fungible = FungibleAssetDelta::default(); let mut non_fungible = NonFungibleAssetDelta::default(); @@ -132,32 +130,42 @@ impl AccountVaultDelta { /// Returns an iterator over the added assets in this delta. pub fn added_assets(&self) -> impl Iterator + '_ { - use crate::asset::{Asset, FungibleAsset, NonFungibleAsset}; self.fungible .0 .iter() .filter(|&(_, &value)| value >= 0) - .map(|(&faucet_id, &diff)| { - Asset::Fungible(FungibleAsset::new(faucet_id, diff.unsigned_abs()).unwrap()) + .map(|(vault_key, &diff)| { + Asset::Fungible( + FungibleAsset::new(vault_key.faucet_id(), diff.unsigned_abs()) + .unwrap() + .with_callbacks(vault_key.callback_flag()), + ) }) - .chain(self.non_fungible.filter_by_action(NonFungibleDeltaAction::Add).map(|key| { - Asset::NonFungible(unsafe { NonFungibleAsset::new_unchecked(key.into()) }) - })) + .chain( + self.non_fungible + .filter_by_action(NonFungibleDeltaAction::Add) + .map(Asset::NonFungible), + ) } /// Returns an iterator over the removed assets in this delta. 
pub fn removed_assets(&self) -> impl Iterator + '_ { - use crate::asset::{Asset, FungibleAsset, NonFungibleAsset}; self.fungible .0 .iter() .filter(|&(_, &value)| value < 0) - .map(|(&faucet_id, &diff)| { - Asset::Fungible(FungibleAsset::new(faucet_id, diff.unsigned_abs()).unwrap()) + .map(|(vault_key, &diff)| { + Asset::Fungible( + FungibleAsset::new(vault_key.faucet_id(), diff.unsigned_abs()) + .unwrap() + .with_callbacks(vault_key.callback_flag()), + ) }) - .chain(self.non_fungible.filter_by_action(NonFungibleDeltaAction::Remove).map(|key| { - Asset::NonFungible(unsafe { NonFungibleAsset::new_unchecked(key.into()) }) - })) + .chain( + self.non_fungible + .filter_by_action(NonFungibleDeltaAction::Remove) + .map(Asset::NonFungible), + ) } } @@ -185,15 +193,18 @@ impl Deserializable for AccountVaultDelta { // ================================================================================================ /// A binary tree map of fungible asset balance changes in the account vault. +/// +/// The [`AssetVaultKey`] orders the assets in the same way as the in-kernel account delta which +/// uses a link map. #[derive(Clone, Debug, Default, PartialEq, Eq)] -pub struct FungibleAssetDelta(BTreeMap); +pub struct FungibleAssetDelta(BTreeMap); impl FungibleAssetDelta { /// Validates and creates a new fungible asset delta. /// /// # Errors /// Returns an error if the delta does not pass the validation. - pub fn new(map: BTreeMap) -> Result { + pub fn new(map: BTreeMap) -> Result { let delta = Self(map); delta.validate()?; @@ -205,8 +216,8 @@ impl FungibleAssetDelta { /// # Errors /// Returns an error if the delta would overflow. 
pub fn add(&mut self, asset: FungibleAsset) -> Result<(), AccountDeltaError> { - let amount: i64 = asset.amount().try_into().expect("Amount it too high"); - self.add_delta(asset.faucet_id(), amount) + let amount: i64 = asset.amount().inner().try_into().expect("Amount it too high"); + self.add_delta(asset.vault_key(), amount) } /// Removes a fungible asset from the delta. @@ -214,13 +225,13 @@ impl FungibleAssetDelta { /// # Errors /// Returns an error if the delta would overflow. pub fn remove(&mut self, asset: FungibleAsset) -> Result<(), AccountDeltaError> { - let amount: i64 = asset.amount().try_into().expect("Amount it too high"); - self.add_delta(asset.faucet_id(), -amount) + let amount: i64 = asset.amount().inner().try_into().expect("Amount it too high"); + self.add_delta(asset.vault_key(), -amount) } - /// Returns the amount of the fungible asset with the given faucet ID. - pub fn amount(&self, faucet_id: &AccountId) -> Option { - self.0.get(faucet_id).copied() + /// Returns the amount of the fungible asset with the given vault key. + pub fn amount(&self, vault_key: &AssetVaultKey) -> Option { + self.0.get(vault_key).copied() } /// Returns the number of fungible assets affected in the delta. @@ -234,7 +245,7 @@ impl FungibleAssetDelta { } /// Returns an iterator over the (key, value) pairs of the map. - pub fn iter(&self) -> impl Iterator { + pub fn iter(&self) -> impl Iterator { self.0.iter() } @@ -250,8 +261,8 @@ impl FungibleAssetDelta { // Track fungible asset amounts - positive and negative. `i64` is not lossy while // fungibles are restricted to 2^63-1. Overflow is still possible but we check for that. - for (&faucet_id, &amount) in other.0.iter() { - self.add_delta(faucet_id, amount)?; + for (&vault_key, &amount) in other.0.iter() { + self.add_delta(vault_key, amount)?; } Ok(()) @@ -265,8 +276,8 @@ impl FungibleAssetDelta { /// /// # Errors /// Returns an error if the delta would overflow. 
- fn add_delta(&mut self, faucet_id: AccountId, delta: i64) -> Result<(), AccountDeltaError> { - match self.0.entry(faucet_id) { + fn add_delta(&mut self, vault_key: AssetVaultKey, delta: i64) -> Result<(), AccountDeltaError> { + match self.0.entry(vault_key) { Entry::Vacant(entry) => { // Only track non-zero amounts. if delta != 0 { @@ -277,7 +288,7 @@ impl FungibleAssetDelta { let old = *entry.get(); let new = old.checked_add(delta).ok_or( AccountDeltaError::FungibleAssetDeltaOverflow { - faucet_id, + faucet_id: vault_key.faucet_id(), current: old, delta, }, @@ -299,9 +310,9 @@ impl FungibleAssetDelta { /// # Errors /// Returns an error if one or more fungible assets' faucet IDs are invalid. fn validate(&self) -> Result<(), AccountDeltaError> { - for faucet_id in self.0.keys() { - if !matches!(faucet_id.account_type(), AccountType::FungibleFaucet) { - return Err(AccountDeltaError::NotAFungibleFaucetId(*faucet_id)); + for vault_key in self.0.keys() { + if !matches!(vault_key.faucet_id().account_type(), AccountType::FungibleFaucet) { + return Err(AccountDeltaError::NotAFungibleFaucetId(vault_key.faucet_id())); } } @@ -314,12 +325,12 @@ impl FungibleAssetDelta { /// Note that the order in which elements are appended should be the link map key ordering. This /// is fulfilled here because the link map key's most significant element takes precedence over /// less significant ones. The most significant element in the fungible asset delta is the - /// account ID prefix and the delta happens to be sorted by account IDs. Since the account ID + /// faucet ID prefix and the delta happens to be sorted by vault keys. Since the faucet ID /// prefix is unique, it will always decide on the ordering of a link map key, so less /// significant elements are unimportant. This implicit sort should therefore always match the /// link map key ordering, however this is subtle and fragile. 
pub(super) fn append_delta_elements(&self, elements: &mut Vec) { - for (faucet_id, amount_delta) in self.iter() { + for (vault_key, amount_delta) in self.iter() { // Note that this iterator is guaranteed to never yield zero amounts, so we don't have // to exclude those explicitly. debug_assert_ne!( @@ -327,12 +338,18 @@ impl FungibleAssetDelta { "fungible asset iterator should never yield amount deltas of 0" ); - let asset = FungibleAsset::new(*faucet_id, amount_delta.unsigned_abs()) - .expect("absolute amount delta should be less than i64::MAX"); let was_added = if *amount_delta > 0 { ONE } else { ZERO }; - - elements.extend_from_slice(&[DOMAIN_ASSET, was_added, ZERO, ZERO]); - elements.extend_from_slice(Word::from(asset).as_elements()); + let amount_delta = Felt::try_from(amount_delta.unsigned_abs()) + .expect("amount delta should be less than i64::MAX"); + + let key_word = vault_key.to_word(); + elements.extend_from_slice(&[ + DOMAIN_ASSET, + was_added, + key_word[2], // faucet_id_suffix_and_metadata + key_word[3], // faucet_id_prefix + ]); + elements.extend_from_slice(&[amount_delta, ZERO, ZERO, ZERO]); } } } @@ -343,11 +360,13 @@ impl Serializable for FungibleAssetDelta { // TODO: We save `i64` as `u64` since winter utils only supports unsigned integers for now. // We should update this code (and deserialization as well) once it supports signed // integers. - target.write_many(self.0.iter().map(|(&faucet_id, &delta)| (faucet_id, delta as u64))); + // TODO: If we keep this code, optimize by not serializing asset ID (which is always 0). 
+ target.write_many(self.0.iter().map(|(vault_key, &delta)| (*vault_key, delta as u64))); } fn get_size_hint(&self) -> usize { - self.0.len().get_size_hint() + self.0.len() * FungibleAsset::SERIALIZED_SIZE + const ENTRY_SIZE: usize = AssetVaultKey::SERIALIZED_SIZE + core::mem::size_of::(); + self.0.len().get_size_hint() + self.0.len() * ENTRY_SIZE } } @@ -355,13 +374,12 @@ impl Deserializable for FungibleAssetDelta { fn read_from(source: &mut R) -> Result { let num_fungible_assets = source.read_usize()?; // TODO: We save `i64` as `u64` since winter utils only supports unsigned integers for now. - // We should update this code (and serialization as well) once it support signeds - // integers. + // We should update this code (and serialization as well) once it supports signed + // integers. let map = source - .read_many::<(AccountId, u64)>(num_fungible_assets)? - .into_iter() - .map(|(account_id, delta_as_u64)| (account_id, delta_as_u64 as i64)) - .collect(); + .read_many_iter::<(AssetVaultKey, u64)>(num_fungible_assets)? + .map(|result| result.map(|(vault_key, delta_as_u64)| (vault_key, delta_as_u64 as i64))) + .collect::>()?; Self::new(map).map_err(|err| DeserializationError::InvalidValue(err.to_string())) } @@ -372,17 +390,17 @@ impl Deserializable for FungibleAssetDelta { /// A binary tree map of non-fungible asset changes (addition and removal) in the account vault. /// -/// The [`LexicographicWord`] wrapper is necessary to order the assets in the same way as the -/// in-kernel account delta which uses a link map. +/// The [`AssetVaultKey`] orders the assets in the same way as the in-kernel account delta which +/// uses a link map. #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct NonFungibleAssetDelta( - BTreeMap, NonFungibleDeltaAction>, + BTreeMap, ); impl NonFungibleAssetDelta { /// Creates a new non-fungible asset delta. 
pub const fn new( - map: BTreeMap, NonFungibleDeltaAction>, + map: BTreeMap, ) -> Self { Self(map) } @@ -415,7 +433,9 @@ impl NonFungibleAssetDelta { /// Returns an iterator over the (key, value) pairs of the map. pub fn iter(&self) -> impl Iterator { - self.0.iter().map(|(key, value)| (key.inner(), value)) + self.0 + .iter() + .map(|(_key, (non_fungible_asset, delta_action))| (non_fungible_asset, delta_action)) } /// Merges another delta into this one, overwriting any existing values. @@ -426,8 +446,8 @@ impl NonFungibleAssetDelta { /// Returns an error if duplicate non-fungible assets are added or removed. pub fn merge(&mut self, other: Self) -> Result<(), AccountDeltaError> { // Merge non-fungible assets. Each non-fungible asset can cancel others out. - for (&key, &action) in other.0.iter() { - self.apply_action(key.into_inner(), action)?; + for (&asset, &action) in other.iter() { + self.apply_action(asset, action)?; } Ok(()) @@ -446,13 +466,13 @@ impl NonFungibleAssetDelta { asset: NonFungibleAsset, action: NonFungibleDeltaAction, ) -> Result<(), AccountDeltaError> { - match self.0.entry(LexicographicWord::new(asset)) { + match self.0.entry(asset.vault_key()) { Entry::Vacant(entry) => { - entry.insert(action); + entry.insert((asset, action)); }, Entry::Occupied(entry) => { - let previous = *entry.get(); - if previous == action { + let (_prev_asset, previous_action) = *entry.get(); + if previous_action == action { // Asset cannot be added nor removed twice. 
return Err(AccountDeltaError::DuplicateNonFungibleVaultUpdate(asset)); } @@ -471,8 +491,8 @@ impl NonFungibleAssetDelta { ) -> impl Iterator + '_ { self.0 .iter() - .filter(move |&(_, cur_action)| cur_action == &action) - .map(|(key, _)| key.into_inner()) + .filter(move |&(_, (_asset, cur_action))| cur_action == &action) + .map(|(_key, (asset, _action))| *asset) } /// Appends the non-fungible asset vault delta to the given `elements` from which the delta @@ -484,8 +504,14 @@ impl NonFungibleAssetDelta { NonFungibleDeltaAction::Add => ONE, }; - elements.extend_from_slice(&[DOMAIN_ASSET, was_added, ZERO, ZERO]); - elements.extend_from_slice(Word::from(*asset).as_elements()); + let key_word = asset.vault_key().to_word(); + elements.extend_from_slice(&[ + DOMAIN_ASSET, + was_added, + key_word[2], // faucet_id_suffix_and_metadata + key_word[3], // faucet_id_prefix + ]); + elements.extend_from_slice(asset.to_value_word().as_elements()); } } } @@ -519,14 +545,14 @@ impl Deserializable for NonFungibleAssetDelta { let num_added = source.read_usize()?; for _ in 0..num_added { - let added_asset = source.read()?; - map.insert(LexicographicWord::new(added_asset), NonFungibleDeltaAction::Add); + let added_asset: NonFungibleAsset = source.read()?; + map.insert(added_asset.vault_key(), (added_asset, NonFungibleDeltaAction::Add)); } let num_removed = source.read_usize()?; for _ in 0..num_removed { - let removed_asset = source.read()?; - map.insert(LexicographicWord::new(removed_asset), NonFungibleDeltaAction::Remove); + let removed_asset: NonFungibleAsset = source.read()?; + map.insert(removed_asset.vault_key(), (removed_asset, NonFungibleDeltaAction::Remove)); } Ok(Self::new(map)) @@ -545,7 +571,7 @@ pub enum NonFungibleDeltaAction { #[cfg(test)] mod tests { use super::{AccountVaultDelta, Deserializable, Serializable}; - use crate::account::{AccountId, AccountIdPrefix}; + use crate::account::AccountId; use crate::asset::{Asset, FungibleAsset, NonFungibleAsset, 
NonFungibleAssetDetails}; use crate::testing::account_id::{ ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, @@ -627,11 +653,11 @@ mod tests { /// Creates an [AccountVaultDelta] with an optional [NonFungibleAsset] delta. This delta /// will be added if `Some(true)`, removed for `Some(false)` and missing for `None`. fn create_delta_with_non_fungible( - account_id_prefix: AccountIdPrefix, + account_id: AccountId, added: Option, ) -> AccountVaultDelta { let asset: Asset = NonFungibleAsset::new( - &NonFungibleAssetDetails::new(account_id_prefix, vec![1, 2, 3]).unwrap(), + &NonFungibleAssetDetails::new(account_id, vec![1, 2, 3]).unwrap(), ) .unwrap() .into(); @@ -643,7 +669,7 @@ mod tests { } } - let account_id = NonFungibleAsset::mock_issuer().prefix(); + let account_id = NonFungibleAsset::mock_issuer(); let mut delta_x = create_delta_with_non_fungible(account_id, x); let delta_y = create_delta_with_non_fungible(account_id, y); diff --git a/crates/miden-protocol/src/account/file.rs b/crates/miden-protocol/src/account/file.rs index 4b6f5d4287..64e979cdb2 100644 --- a/crates/miden-protocol/src/account/file.rs +++ b/crates/miden-protocol/src/account/file.rs @@ -119,8 +119,8 @@ mod tests { let storage = AccountStorage::new(vec![]).unwrap(); let nonce = Felt::new(1); let account = Account::new_existing(id, vault, storage, code, nonce); - let auth_secret_key = AuthSecretKey::new_falcon512_rpo(); - let auth_secret_key_2 = AuthSecretKey::new_falcon512_rpo(); + let auth_secret_key = AuthSecretKey::new_falcon512_poseidon2(); + let auth_secret_key_2 = AuthSecretKey::new_falcon512_poseidon2(); AccountFile::new(account, vec![auth_secret_key, auth_secret_key_2]) } diff --git a/crates/miden-protocol/src/account/header.rs b/crates/miden-protocol/src/account/header.rs index 965b1a66f4..93635fa3a3 100644 --- a/crates/miden-protocol/src/account/header.rs +++ b/crates/miden-protocol/src/account/header.rs @@ -14,7 +14,13 @@ use crate::transaction::memory::{ ACCT_VAULT_ROOT_OFFSET, MemoryOffset, }; -use 
crate::utils::serde::{Deserializable, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{WORD_SIZE, Word, WordError}; // ACCOUNT HEADER @@ -69,10 +75,10 @@ impl AccountHeader { }); } - let id = AccountId::try_from([ - elements[ACCT_ID_AND_NONCE_OFFSET as usize + ACCT_ID_PREFIX_IDX], + let id = AccountId::try_from_elements( elements[ACCT_ID_AND_NONCE_OFFSET as usize + ACCT_ID_SUFFIX_IDX], - ]) + elements[ACCT_ID_AND_NONCE_OFFSET as usize + ACCT_ID_PREFIX_IDX], + ) .map_err(AccountError::FinalAccountHeaderIdParsingFailed)?; let nonce = elements[ACCT_ID_AND_NONCE_OFFSET as usize + ACCT_NONCE_IDX]; let vault_root = parse_word(elements, ACCT_VAULT_ROOT_OFFSET) @@ -197,7 +203,7 @@ impl SequentialCommit for AccountHeader { // ================================================================================================ impl Serializable for AccountHeader { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { self.id.write_into(target); self.nonce.write_into(target); self.vault_root.write_into(target); @@ -207,9 +213,7 @@ impl Serializable for AccountHeader { } impl Deserializable for AccountHeader { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let id = AccountId::read_from(source)?; let nonce = Felt::read_from(source)?; let vault_root = Word::read_from(source)?; @@ -240,13 +244,13 @@ fn parse_word(data: &[Felt], offset: MemoryOffset) -> Result { #[cfg(test)] mod tests { use miden_core::Felt; - use miden_core::utils::{Deserializable, Serializable}; use super::AccountHeader; use crate::Word; use crate::account::StorageSlotContent; use crate::account::tests::build_account; use crate::asset::FungibleAsset; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn test_serde_account_storage() { diff --git a/crates/miden-protocol/src/account/mod.rs b/crates/miden-protocol/src/account/mod.rs index 
5a6dd7b045..0396dbac43 100644 --- a/crates/miden-protocol/src/account/mod.rs +++ b/crates/miden-protocol/src/account/mod.rs @@ -55,6 +55,8 @@ pub use storage::{ PartialStorage, PartialStorageMap, StorageMap, + StorageMapKey, + StorageMapKeyHash, StorageMapWitness, StorageSlot, StorageSlotContent, @@ -344,7 +346,7 @@ impl Account { pub fn increment_nonce(&mut self, nonce_delta: Felt) -> Result<(), AccountError> { let new_nonce = self.nonce + nonce_delta; - if new_nonce.as_int() < self.nonce.as_int() { + if new_nonce.as_canonical_u64() < self.nonce.as_canonical_u64() { return Err(AccountError::NonceOverflow { current: self.nonce, increment: nonce_delta, @@ -551,7 +553,6 @@ mod tests { use assert_matches::assert_matches; use miden_assembly::Assembler; - use miden_core::FieldElement; use miden_crypto::utils::{Deserializable, Serializable}; use miden_crypto::{Felt, Word}; @@ -574,6 +575,7 @@ mod tests { PartialAccount, StorageMap, StorageMapDelta, + StorageMapKey, StorageSlot, StorageSlotContent, StorageSlotName, @@ -635,7 +637,7 @@ mod tests { let storage_slot_value_1 = StorageSlotContent::Value(Word::from([5, 6, 7, 8u32])); let mut storage_map = StorageMap::with_entries([ ( - Word::new([Felt::new(101), Felt::new(102), Felt::new(103), Felt::new(104)]), + StorageMapKey::from_array([101, 102, 103, 104]), Word::from([ Felt::new(1_u64), Felt::new(2_u64), @@ -644,7 +646,7 @@ mod tests { ]), ), ( - Word::new([Felt::new(105), Felt::new(106), Felt::new(107), Felt::new(108)]), + StorageMapKey::from_array([105, 106, 107, 108]), Word::new([Felt::new(5_u64), Felt::new(6_u64), Felt::new(7_u64), Felt::new(8_u64)]), ), ]) @@ -658,14 +660,11 @@ mod tests { ); // update storage map - let new_map_entry = ( - Word::new([Felt::new(101), Felt::new(102), Felt::new(103), Felt::new(104)]), - [Felt::new(9_u64), Felt::new(10_u64), Felt::new(11_u64), Felt::new(12_u64)], - ); + let key = StorageMapKey::from_array([101, 102, 103, 104]); + let value = Word::from([9, 10, 11, 12u32]); - let 
updated_map = - StorageMapDelta::from_iters([], [(new_map_entry.0, new_map_entry.1.into())]); - storage_map.insert(new_map_entry.0, new_map_entry.1.into()).unwrap(); + let updated_map = StorageMapDelta::from_iters([], [(key, value)]); + storage_map.insert(key, value).unwrap(); // build account delta let final_nonce = Felt::new(2); @@ -804,10 +803,14 @@ mod tests { let library1 = Assembler::default().assemble_library([code1]).unwrap(); // This component support all account types except the regular account with updatable code. - let metadata = AccountComponentMetadata::new("test::component1") - .with_supported_type(AccountType::FungibleFaucet) - .with_supported_type(AccountType::NonFungibleFaucet) - .with_supported_type(AccountType::RegularAccountImmutableCode); + let metadata = AccountComponentMetadata::new( + "test::component1", + [ + AccountType::FungibleFaucet, + AccountType::NonFungibleFaucet, + AccountType::RegularAccountImmutableCode, + ], + ); let component1 = AccountComponent::new(library1, vec![], metadata).unwrap(); let err = Account::initialize_from_components( diff --git a/crates/miden-protocol/src/account/partial.rs b/crates/miden-protocol/src/account/partial.rs index 6c9849e441..414c2ef067 100644 --- a/crates/miden-protocol/src/account/partial.rs +++ b/crates/miden-protocol/src/account/partial.rs @@ -1,7 +1,6 @@ use alloc::string::ToString; use alloc::vec::Vec; -use miden_core::utils::{Deserializable, Serializable}; use miden_core::{Felt, ZERO}; use super::{Account, AccountCode, AccountId, PartialStorage}; @@ -10,7 +9,13 @@ use crate::account::{AccountHeader, validate_account_seed}; use crate::asset::PartialVault; use crate::crypto::SequentialCommit; use crate::errors::AccountError; -use crate::utils::serde::DeserializationError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A partial representation of an account. 
/// @@ -209,7 +214,7 @@ impl SequentialCommit for PartialAccount { // ================================================================================================ impl Serializable for PartialAccount { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { target.write(self.id); target.write(self.nonce); target.write(&self.code); @@ -220,9 +225,7 @@ impl Serializable for PartialAccount { } impl Deserializable for PartialAccount { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let account_id = source.read()?; let nonce = source.read()?; let account_code = source.read()?; diff --git a/crates/miden-protocol/src/account/storage/header.rs b/crates/miden-protocol/src/account/storage/header.rs index 359e6ff49e..cc809a28d6 100644 --- a/crates/miden-protocol/src/account/storage/header.rs +++ b/crates/miden-protocol/src/account/storage/header.rs @@ -5,6 +5,7 @@ use alloc::vec::Vec; use super::map::EMPTY_STORAGE_MAP_ROOT; use super::{AccountStorage, Felt, StorageSlotType, Word}; +use crate::ZERO; use crate::account::{StorageSlot, StorageSlotId, StorageSlotName}; use crate::crypto::SequentialCommit; use crate::errors::AccountError; @@ -15,7 +16,6 @@ use crate::utils::serde::{ DeserializationError, Serializable, }; -use crate::{FieldElement, ZERO}; // ACCOUNT STORAGE HEADER // ================================================================================================ @@ -233,7 +233,8 @@ impl Serializable for AccountStorageHeader { impl Deserializable for AccountStorageHeader { fn read_from(source: &mut R) -> Result { let len = source.read_u8()?; - let slots: Vec = source.read_many(len as usize)?; + let slots: Vec = + source.read_many_iter(len as usize)?.collect::>()?; Self::new(slots).map_err(|err| DeserializationError::InvalidValue(err.to_string())) } } @@ -351,12 +352,12 @@ mod tests { use alloc::string::ToString; use miden_core::Felt; - use miden_core::utils::{Deserializable, Serializable}; use 
super::AccountStorageHeader; use crate::Word; use crate::account::{AccountStorage, StorageSlotHeader, StorageSlotName, StorageSlotType}; use crate::testing::storage::{MOCK_MAP_SLOT, MOCK_VALUE_SLOT0, MOCK_VALUE_SLOT1}; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn test_from_account_storage() { diff --git a/crates/miden-protocol/src/account/storage/map/key.rs b/crates/miden-protocol/src/account/storage/map/key.rs new file mode 100644 index 0000000000..dda1a09c36 --- /dev/null +++ b/crates/miden-protocol/src/account/storage/map/key.rs @@ -0,0 +1,125 @@ +use alloc::string::String; + +use miden_crypto::merkle::smt::{LeafIndex, SMT_DEPTH}; +use miden_protocol_macros::WordWrapper; + +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Hasher, Word}; + +// STORAGE MAP KEY +// ================================================================================================ + +/// A raw, user-chosen key for a [`StorageMap`](super::StorageMap). +/// +/// Storage map keys are user-chosen and thus not necessarily uniformly distributed. To mitigate +/// potential tree imbalance, keys are hashed before being inserted into the underlying SMT. +/// +/// Use [`StorageMapKey::hash`] to produce the corresponding [`StorageMapKeyHash`] that is used +/// in the SMT. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, WordWrapper)] +pub struct StorageMapKey(Word); + +impl StorageMapKey { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// The serialized size of the map key in bytes. + pub const SERIALIZED_SIZE: usize = Word::SERIALIZED_SIZE; + + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a new [`StorageMapKey`] from the given word. 
+ pub fn new(word: Word) -> Self { + Self::from_raw(word) + } + + /// Returns the storage map key based on an empty word. + pub fn empty() -> Self { + Self::from_raw(Word::empty()) + } + + /// Creates a [`StorageMapKey`] from a `u32` index. + /// + /// This is a convenience constructor for the common pattern of using sequential indices + /// as storage map keys, producing a key of `[idx, 0, 0, 0]`. + pub fn from_index(idx: u32) -> Self { + Self::from_raw(Word::from([idx, 0, 0, 0])) + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Hashes this raw map key to produce a [`StorageMapKeyHash`]. + /// + /// Storage map keys are hashed before being inserted into the SMT to ensure a uniform + /// key distribution. + pub fn hash(&self) -> StorageMapKeyHash { + StorageMapKeyHash::from_raw(Hasher::hash_elements(self.0.as_elements())) + } +} + +impl From for Word { + fn from(key: StorageMapKey) -> Self { + key.0 + } +} + +impl core::fmt::Display for StorageMapKey { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.write_fmt(format_args!("{}", self.as_word())) + } +} + +impl Serializable for StorageMapKey { + fn write_into(&self, target: &mut W) { + target.write_many(self.as_word()); + } + + fn get_size_hint(&self) -> usize { + Self::SERIALIZED_SIZE + } +} + +impl Deserializable for StorageMapKey { + fn read_from(source: &mut R) -> Result { + let key = source.read()?; + Ok(StorageMapKey::from_raw(key)) + } +} + +// STORAGE MAP KEY HASH +// ================================================================================================ + +/// A hashed key for a [`StorageMap`](super::StorageMap). +/// +/// This is produced by hashing a [`StorageMapKey`] and is used as the actual key in the +/// underlying SMT. Wrapping the hashed key in a distinct type prevents accidentally using a raw +/// key where a hashed key is expected and vice-versa. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, WordWrapper)] +pub struct StorageMapKeyHash(Word); + +impl StorageMapKeyHash { + /// Returns the leaf index in the SMT for this hashed key. + pub fn to_leaf_index(&self) -> LeafIndex { + self.0.into() + } +} + +impl From for Word { + fn from(key: StorageMapKeyHash) -> Self { + key.0 + } +} + +impl From for StorageMapKeyHash { + fn from(key: StorageMapKey) -> Self { + key.hash() + } +} diff --git a/crates/miden-protocol/src/account/storage/map/mod.rs b/crates/miden-protocol/src/account/storage/map/mod.rs index 46d489a91c..66df39e909 100644 --- a/crates/miden-protocol/src/account/storage/map/mod.rs +++ b/crates/miden-protocol/src/account/storage/map/mod.rs @@ -4,12 +4,14 @@ use miden_core::EMPTY_WORD; use miden_crypto::merkle::EmptySubtreeRoots; use super::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, Word}; -use crate::Hasher; use crate::account::StorageMapDelta; use crate::crypto::merkle::InnerNodeInfo; use crate::crypto::merkle::smt::{LeafIndex, SMT_DEPTH, Smt, SmtLeaf}; use crate::errors::{AccountError, StorageMapError}; +mod key; +pub use key::{StorageMapKey, StorageMapKeyHash}; + mod partial; pub use partial::PartialStorageMap; @@ -43,11 +45,11 @@ pub const EMPTY_STORAGE_MAP_ROOT: Word = *EmptySubtreeRoots::entry(StorageMap::D pub struct StorageMap { /// The SMT where each key is the hashed original key. smt: Smt, - /// The entries of the map where the key is the raw user-chosen one. + /// The entries of the map that retains the original unhashed keys (i.e. [`StorageMapKey`]). /// /// It is an invariant of this type that the map's entries are always consistent with the SMT's /// entries and vice-versa. - entries: BTreeMap, + entries: BTreeMap, } impl StorageMap { @@ -79,8 +81,8 @@ impl StorageMap { /// /// Returns an error if: /// - the provided entries contain multiple values for the same key. 
- pub fn with_entries>( - entries: impl IntoIterator, + pub fn with_entries>( + entries: impl IntoIterator, ) -> Result { let mut map = BTreeMap::new(); @@ -98,8 +100,8 @@ impl StorageMap { } /// Creates a new [`StorageMap`] from the given map. For internal use. - fn from_btree_map(entries: BTreeMap) -> Self { - let hashed_keys_iter = entries.iter().map(|(key, value)| (Self::hash_key(*key), *value)); + fn from_btree_map(entries: BTreeMap) -> Self { + let hashed_keys_iter = entries.iter().map(|(key, value)| (key.hash().as_word(), *value)); let smt = Smt::with_entries(hashed_keys_iter) .expect("btree maps should not contain duplicate keys"); @@ -132,21 +134,20 @@ impl StorageMap { /// Returns the value corresponding to the key or [`Self::EMPTY_VALUE`] if the key is not /// associated with a value. - pub fn get(&self, raw_key: &Word) -> Word { - self.entries.get(raw_key).copied().unwrap_or_default() + pub fn get(&self, key: &StorageMapKey) -> Word { + self.entries.get(key).copied().unwrap_or_default() } - /// Returns an opening of the leaf associated with raw key. + /// Returns an opening of the leaf associated with the given key. /// /// Conceptually, an opening is a Merkle path to the leaf, as well as the leaf itself. - pub fn open(&self, raw_key: &Word) -> StorageMapWitness { - let hashed_map_key = Self::hash_key(*raw_key); - let smt_proof = self.smt.open(&hashed_map_key); - let value = self.entries.get(raw_key).copied().unwrap_or_default(); + pub fn open(&self, key: &StorageMapKey) -> StorageMapWitness { + let smt_proof = self.smt.open(&key.hash().as_word()); + let value = self.entries.get(key).copied().unwrap_or_default(); // SAFETY: The key value pair is guaranteed to be present in the provided proof since we // open its hashed version and because of the guarantees of the storage map. 
- StorageMapWitness::new_unchecked(smt_proof, [(*raw_key, value)]) + StorageMapWitness::new_unchecked(smt_proof, [(*key, value)]) } // ITERATORS @@ -158,9 +159,7 @@ impl StorageMap { } /// Returns an iterator over the key-value pairs in this storage map. - /// - /// Note that the returned key is the raw map key. - pub fn entries(&self) -> impl Iterator { + pub fn entries(&self) -> impl Iterator { self.entries.iter() } @@ -176,16 +175,16 @@ impl StorageMap { /// [`Self::EMPTY_VALUE`] if no entry was previously present. /// /// If the provided `value` is [`Self::EMPTY_VALUE`] the entry will be removed. - pub fn insert(&mut self, raw_key: Word, value: Word) -> Result { + pub fn insert(&mut self, key: StorageMapKey, value: Word) -> Result { if value == EMPTY_WORD { - self.entries.remove(&raw_key); + self.entries.remove(&key); } else { - self.entries.insert(raw_key, value); + self.entries.insert(key, value); } - let hashed_key = Self::hash_key(raw_key); + let hashed_key = key.hash(); self.smt - .insert(hashed_key, value) + .insert(hashed_key.into(), value) .map_err(AccountError::MaxNumStorageMapLeavesExceeded) } @@ -200,27 +199,9 @@ impl StorageMap { } /// Consumes the map and returns the underlying map of entries. - pub fn into_entries(self) -> BTreeMap { + pub fn into_entries(self) -> BTreeMap { self.entries } - - // UTILITY FUNCTIONS - // -------------------------------------------------------------------------------------------- - - /// Hashes the given key to get the key of the SMT. - pub fn hash_key(raw_key: Word) -> Word { - Hasher::hash_elements(raw_key.as_elements()) - } - - /// Returns leaf index of a raw map key. - pub fn map_key_to_leaf_index(raw_key: Word) -> LeafIndex { - Self::hash_key(raw_key).into() - } - - /// Returns the leaf index of a map key. 
- pub fn hashed_map_key_to_leaf_index(hashed_map_key: Word) -> LeafIndex { - hashed_map_key.into() - } } impl Default for StorageMap { @@ -253,7 +234,14 @@ impl Deserializable for StorageMap { mod tests { use assert_matches::assert_matches; - use super::{Deserializable, EMPTY_STORAGE_MAP_ROOT, Serializable, StorageMap, Word}; + use super::{ + Deserializable, + EMPTY_STORAGE_MAP_ROOT, + Serializable, + StorageMap, + StorageMapKey, + Word, + }; use crate::errors::StorageMapError; #[test] @@ -264,9 +252,9 @@ mod tests { assert_eq!(storage_map_default, StorageMap::read_from_bytes(&bytes).unwrap()); // StorageMap with values - let storage_map_leaves_2: [(Word, Word); 2] = [ - (Word::from([101, 102, 103, 104u32]), Word::from([1, 2, 3, 4u32])), - (Word::from([105, 106, 107, 108u32]), Word::from([5, 6, 7, 8u32])), + let storage_map_leaves_2 = [ + (StorageMapKey::from_array([101, 102, 103, 104]), Word::from([1, 2, 3, 4u32])), + (StorageMapKey::from_array([105, 106, 107, 108]), Word::from([5, 6, 7, 8u32])), ]; let storage_map = StorageMap::with_entries(storage_map_leaves_2).unwrap(); assert_eq!(storage_map.num_entries(), 2); @@ -289,9 +277,9 @@ mod tests { #[test] fn account_storage_map_fails_on_duplicate_entries() { // StorageMap with values - let storage_map_leaves_2: [(Word, Word); 2] = [ - (Word::from([101, 102, 103, 104u32]), Word::from([1, 2, 3, 4u32])), - (Word::from([101, 102, 103, 104u32]), Word::from([5, 6, 7, 8u32])), + let storage_map_leaves_2 = [ + (StorageMapKey::from_array([101, 102, 103, 104]), Word::from([1, 2, 3, 4u32])), + (StorageMapKey::from_array([101, 102, 103, 104]), Word::from([5, 6, 7, 8u32])), ]; let error = StorageMap::with_entries(storage_map_leaves_2).unwrap_err(); diff --git a/crates/miden-protocol/src/account/storage/map/partial.rs b/crates/miden-protocol/src/account/storage/map/partial.rs index 55f1f408f7..e120c32d8d 100644 --- a/crates/miden-protocol/src/account/storage/map/partial.rs +++ 
b/crates/miden-protocol/src/account/storage/map/partial.rs @@ -1,12 +1,17 @@ use alloc::collections::BTreeMap; -use miden_core::utils::{Deserializable, Serializable}; use miden_crypto::Word; use miden_crypto::merkle::smt::{LeafIndex, PartialSmt, SMT_DEPTH, SmtLeaf, SmtProof}; use miden_crypto::merkle::{InnerNodeInfo, MerkleError}; -use crate::account::{StorageMap, StorageMapWitness}; -use crate::utils::serde::{ByteReader, DeserializationError}; +use crate::account::{StorageMap, StorageMapKey, StorageMapWitness}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A partial representation of a [`StorageMap`], containing only proofs for a subset of the /// key-value pairs. @@ -25,11 +30,11 @@ use crate::utils::serde::{ByteReader, DeserializationError}; #[derive(Clone, Debug, PartialEq, Eq, Default)] pub struct PartialStorageMap { partial_smt: PartialSmt, - /// The entries of the map where the key is the raw user-chosen one. + /// The entries of the map that retains the original unhashed keys (i.e. [`StorageMapKey`]). /// /// It is an invariant of this type that the map's entries are always consistent with the /// partial SMT's entries and vice-versa. - entries: BTreeMap, + entries: BTreeMap, } impl PartialStorageMap { @@ -97,13 +102,13 @@ impl PartialStorageMap { /// - a non-empty [`Word`] if the key is tracked by this map and exists in it, /// - [`Word::empty`] if the key is tracked by this map and does not exist, /// - `None` if the key is not tracked by this map. - pub fn get(&self, raw_key: &Word) -> Option { - let hashed_key = StorageMap::hash_key(*raw_key); + pub fn get(&self, key: &StorageMapKey) -> Option { + let hash_word = key.hash().as_word(); // This returns an error if the key is not tracked which we map to a `None`. - self.partial_smt.get_value(&hashed_key).ok() + self.partial_smt.get_value(&hash_word).ok() } - /// Returns an opening of the leaf associated with the raw key. 
+ /// Returns an opening of the leaf associated with the given key. /// /// Conceptually, an opening is a Merkle path to the leaf, as well as the leaf itself. /// @@ -111,14 +116,13 @@ impl PartialStorageMap { /// /// Returns an error if: /// - the key is not tracked by this partial storage map. - pub fn open(&self, raw_key: &Word) -> Result { - let hashed_key = StorageMap::hash_key(*raw_key); - let smt_proof = self.partial_smt.open(&hashed_key)?; - let value = self.entries.get(raw_key).copied().unwrap_or_default(); + pub fn open(&self, key: &StorageMapKey) -> Result { + let smt_proof = self.partial_smt.open(&key.hash().as_word())?; + let value = self.entries.get(key).copied().unwrap_or_default(); // SAFETY: The key value pair is guaranteed to be present in the provided proof since we // open its hashed version and because of the guarantees of the partial storage map. - Ok(StorageMapWitness::new_unchecked(smt_proof, [(*raw_key, value)])) + Ok(StorageMapWitness::new_unchecked(smt_proof, [(*key, value)])) } // ITERATORS @@ -130,9 +134,7 @@ impl PartialStorageMap { } /// Returns an iterator over the key-value pairs in this storage map. - /// - /// Note that the returned key is the raw map key. 
- pub fn entries(&self) -> impl Iterator { + pub fn entries(&self) -> impl Iterator { self.entries.iter() } @@ -152,7 +154,7 @@ impl PartialStorageMap { } impl Serializable for PartialStorageMap { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { target.write(&self.partial_smt); target.write_usize(self.entries.len()); target.write_many(self.entries.keys()); @@ -167,8 +169,8 @@ impl Deserializable for PartialStorageMap { let num_entries: usize = source.read()?; for _ in 0..num_entries { - let key: Word = source.read()?; - let hashed_map_key = StorageMap::hash_key(key); + let key: StorageMapKey = source.read()?; + let hashed_map_key: Word = key.hash().into(); let value = partial_smt.get_value(&hashed_map_key).map_err(|err| { DeserializationError::InvalidValue(format!( "failed to find map key {key} in partial SMT: {err}" diff --git a/crates/miden-protocol/src/account/storage/map/witness.rs b/crates/miden-protocol/src/account/storage/map/witness.rs index f70a8359af..e818bd9755 100644 --- a/crates/miden-protocol/src/account/storage/map/witness.rs +++ b/crates/miden-protocol/src/account/storage/map/witness.rs @@ -4,7 +4,7 @@ use miden_crypto::merkle::InnerNodeInfo; use miden_crypto::merkle::smt::SmtProof; use crate::Word; -use crate::account::StorageMap; +use crate::account::StorageMapKey; use crate::errors::StorageMapError; /// A witness of an asset in a [`StorageMap`](super::StorageMap). @@ -26,7 +26,7 @@ pub struct StorageMapWitness { /// /// It is an invariant of this type that the map's entries are always consistent with the SMT's /// entries and vice-versa. - entries: BTreeMap, + entries: BTreeMap, } impl StorageMapWitness { @@ -41,21 +41,20 @@ impl StorageMapWitness { /// - Any of the map keys is not contained in the proof. 
pub fn new( proof: SmtProof, - raw_keys: impl IntoIterator, + keys: impl IntoIterator, ) -> Result { let mut entries = BTreeMap::new(); - for raw_key in raw_keys.into_iter() { - let hashed_map_key = StorageMap::hash_key(raw_key); - let value = - proof.get(&hashed_map_key).ok_or(StorageMapError::MissingKey { raw_key })?; - entries.insert(raw_key, value); + for key in keys.into_iter() { + let hashed_map_key = key.hash().as_word(); + let value = proof.get(&hashed_map_key).ok_or(StorageMapError::MissingKey { key })?; + entries.insert(key, value); } Ok(Self { proof, entries }) } - /// Creates a new [`StorageMapWitness`] from an SMT proof and a set of raw key value pairs. + /// Creates a new [`StorageMapWitness`] from an SMT proof and a set of key value pairs. /// /// # Warning /// @@ -63,11 +62,11 @@ impl StorageMapWitness { /// details. pub fn new_unchecked( proof: SmtProof, - raw_key_values: impl IntoIterator, + key_values: impl IntoIterator, ) -> Self { Self { proof, - entries: raw_key_values.into_iter().collect(), + entries: key_values.into_iter().collect(), } } @@ -83,15 +82,13 @@ impl StorageMapWitness { /// - a non-empty [`Word`] if the key is tracked by this witness and exists in it, /// - [`Word::empty`] if the key is tracked by this witness and does not exist, /// - `None` if the key is not tracked by this witness. - pub fn get(&self, raw_key: &Word) -> Option { - let hashed_key = StorageMap::hash_key(*raw_key); - self.proof.get(&hashed_key) + pub fn get(&self, key: StorageMapKey) -> Option { + let hash_word = key.hash().as_word(); + self.proof.get(&hash_word) } /// Returns an iterator over the key-value pairs in this witness. - /// - /// Note that the returned key is the raw map key. 
- pub fn entries(&self) -> impl Iterator { + pub fn entries(&self) -> impl Iterator { self.entries.iter() } @@ -99,7 +96,7 @@ impl StorageMapWitness { pub fn authenticated_nodes(&self) -> impl Iterator + '_ { self.proof .path() - .authenticated_nodes(self.proof.leaf().index().value(), self.proof.leaf().hash()) + .authenticated_nodes(self.proof.leaf().index().position(), self.proof.leaf().hash()) .expect("leaf index is u64 and should be less than 2^SMT_DEPTH") } } @@ -120,7 +117,7 @@ mod tests { #[test] fn creating_witness_fails_on_missing_key() { // Create a storage map with one key-value pair - let key1 = Word::from([1, 2, 3, 4u32]); + let key1 = StorageMapKey::from_array([1, 2, 3, 4]); let value1 = Word::from([10, 20, 30, 40u32]); let entries = [(key1, value1)]; let storage_map = StorageMap::with_entries(entries).unwrap(); @@ -129,11 +126,11 @@ mod tests { let proof = storage_map.open(&key1).into(); // Try to create a witness for a different key that's not in the proof - let missing_key = Word::from([5, 6, 7, 8u32]); + let missing_key = StorageMapKey::from_array([5, 6, 7, 8u32]); let result = StorageMapWitness::new(proof, [missing_key]); - assert_matches!(result, Err(StorageMapError::MissingKey { raw_key }) => { - assert_eq!(raw_key, missing_key); + assert_matches!(result, Err(StorageMapError::MissingKey { key }) => { + assert_eq!(key, missing_key); }); } } diff --git a/crates/miden-protocol/src/account/storage/mod.rs b/crates/miden-protocol/src/account/storage/mod.rs index 48fdf48164..4f3217f905 100644 --- a/crates/miden-protocol/src/account/storage/mod.rs +++ b/crates/miden-protocol/src/account/storage/mod.rs @@ -19,7 +19,7 @@ mod slot; pub use slot::{StorageSlot, StorageSlotContent, StorageSlotId, StorageSlotName, StorageSlotType}; mod map; -pub use map::{PartialStorageMap, StorageMap, StorageMapWitness}; +pub use map::{PartialStorageMap, StorageMap, StorageMapKey, StorageMapKeyHash, StorageMapWitness}; mod header; pub use header::{AccountStorageHeader, 
StorageSlotHeader}; @@ -76,7 +76,7 @@ impl AccountStorage { } // Unstable sort is fine because we require all names to be unique. - slots.sort_unstable(); + slots.sort_unstable_by(|a, b| a.name().cmp(b.name())); // Check for slot name uniqueness by checking each neighboring slot's IDs. This is // sufficient because the slots are sorted. @@ -191,7 +191,7 @@ impl AccountStorage { self.get(slot_name) .ok_or_else(|| AccountError::StorageSlotNameNotFound { slot_name: slot_name.clone() }) .and_then(|slot| match slot.content() { - StorageSlotContent::Map(map) => Ok(map.get(&key)), + StorageSlotContent::Map(map) => Ok(map.get(&StorageMapKey::from_raw(key))), _ => Err(AccountError::StorageSlotNotMap(slot_name.clone())), }) } @@ -271,7 +271,7 @@ impl AccountStorage { pub fn set_map_item( &mut self, slot_name: &StorageSlotName, - raw_key: Word, + key: StorageMapKey, value: Word, ) -> Result<(Word, Word), AccountError> { let slot = self.get_mut(slot_name).ok_or_else(|| { @@ -284,7 +284,7 @@ impl AccountStorage { let old_root = storage_map.root(); - let old_value = storage_map.insert(raw_key, value)?; + let old_value = storage_map.insert(key, value)?; Ok((old_root, old_value)) } @@ -348,7 +348,7 @@ impl Serializable for AccountStorage { impl Deserializable for AccountStorage { fn read_from(source: &mut R) -> Result { let num_slots = source.read_u8()? 
as usize; - let slots = source.read_many::(num_slots)?; + let slots = source.read_many_iter::(num_slots)?.collect::>()?; Self::new(slots).map_err(|err| DeserializationError::InvalidValue(err.to_string())) } @@ -422,7 +422,7 @@ mod tests { assert_eq!(name, slot_name0); }); - slots.sort_unstable(); + slots.sort_unstable_by(|a, b| a.name().cmp(b.name())); let err = AccountStorageHeader::new(slots.iter().map(StorageSlotHeader::from).collect()) .unwrap_err(); diff --git a/crates/miden-protocol/src/account/storage/partial.rs b/crates/miden-protocol/src/account/storage/partial.rs index 41e2500bac..7d5ef2d18d 100644 --- a/crates/miden-protocol/src/account/storage/partial.rs +++ b/crates/miden-protocol/src/account/storage/partial.rs @@ -1,6 +1,5 @@ use alloc::collections::{BTreeMap, BTreeSet}; -use miden_core::utils::{Deserializable, Serializable}; use miden_crypto::Word; use miden_crypto::merkle::InnerNodeInfo; use miden_crypto::merkle::smt::SmtLeaf; @@ -8,6 +7,13 @@ use miden_crypto::merkle::smt::SmtLeaf; use super::{AccountStorage, AccountStorageHeader, StorageSlotContent}; use crate::account::PartialStorageMap; use crate::errors::AccountError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A partial representation of an account storage, containing only a subset of the storage data. 
/// @@ -133,16 +139,14 @@ impl PartialStorage { } impl Serializable for PartialStorage { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { target.write(&self.header); target.write(&self.maps); } } impl Deserializable for PartialStorage { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let header: AccountStorageHeader = source.read()?; let map_smts: BTreeMap = source.read()?; @@ -163,14 +167,15 @@ mod tests { PartialStorage, PartialStorageMap, StorageMap, + StorageMapKey, StorageSlot, StorageSlotName, }; #[test] pub fn new_partial_storage() -> anyhow::Result<()> { - let map_key_present: Word = [1u64, 2, 3, 4].try_into()?; - let map_key_absent: Word = [9u64, 12, 18, 3].try_into()?; + let map_key_present = StorageMapKey::from_array([1, 2, 3, 4]); + let map_key_absent = StorageMapKey::from_array([9, 12, 18, 3]); let mut map_1 = StorageMap::new(); map_1.insert(map_key_absent, Word::try_from([1u64, 2, 3, 2])?).unwrap(); diff --git a/crates/miden-protocol/src/account/storage/slot/slot_content.rs b/crates/miden-protocol/src/account/storage/slot/slot_content.rs index 746391de54..dfc37017dd 100644 --- a/crates/miden-protocol/src/account/storage/slot/slot_content.rs +++ b/crates/miden-protocol/src/account/storage/slot/slot_content.rs @@ -1,10 +1,15 @@ use miden_core::EMPTY_WORD; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; use crate::account::StorageSlotType; use crate::account::storage::map::EMPTY_STORAGE_MAP_ROOT; use crate::account::storage::{StorageMap, Word}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // STORAGE SLOT CONTENT // ================================================================================================ @@ -116,9 +121,8 @@ impl Deserializable for StorageSlotContent { #[cfg(test)] mod tests { - use 
miden_core::utils::{Deserializable, Serializable}; - use crate::account::AccountStorage; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn test_serde_storage_slot_content() { diff --git a/crates/miden-protocol/src/account/storage/slot/slot_id.rs b/crates/miden-protocol/src/account/storage/slot/slot_id.rs index b22b46b2dd..b589cfef6d 100644 --- a/crates/miden-protocol/src/account/storage/slot/slot_id.rs +++ b/crates/miden-protocol/src/account/storage/slot/slot_id.rs @@ -61,17 +61,17 @@ impl StorageSlotId { /// Returns the [`StorageSlotId`]'s felts encoded into a u128. fn as_u128(&self) -> u128 { let mut le_bytes = [0_u8; 16]; - le_bytes[..8].copy_from_slice(&self.suffix().as_int().to_le_bytes()); - le_bytes[8..].copy_from_slice(&self.prefix().as_int().to_le_bytes()); + le_bytes[..8].copy_from_slice(&self.suffix().as_canonical_u64().to_le_bytes()); + le_bytes[8..].copy_from_slice(&self.prefix().as_canonical_u64().to_le_bytes()); u128::from_le_bytes(le_bytes) } } impl Ord for StorageSlotId { fn cmp(&self, other: &Self) -> Ordering { - match self.prefix.as_int().cmp(&other.prefix.as_int()) { + match self.prefix.as_canonical_u64().cmp(&other.prefix.as_canonical_u64()) { ord @ Ordering::Less | ord @ Ordering::Greater => ord, - Ordering::Equal => self.suffix.as_int().cmp(&other.suffix.as_int()), + Ordering::Equal => self.suffix.as_canonical_u64().cmp(&other.suffix.as_canonical_u64()), } } } @@ -84,8 +84,8 @@ impl PartialOrd for StorageSlotId { impl Hash for StorageSlotId { fn hash(&self, state: &mut H) { - self.suffix.inner().hash(state); - self.prefix.inner().hash(state); + self.suffix.as_canonical_u64().hash(state); + self.prefix.as_canonical_u64().hash(state); } } diff --git a/crates/miden-protocol/src/account/storage/slot/slot_name.rs b/crates/miden-protocol/src/account/storage/slot/slot_name.rs index 2dbb80180f..08892d5de1 100644 --- a/crates/miden-protocol/src/account/storage/slot/slot_name.rs +++ 
b/crates/miden-protocol/src/account/storage/slot/slot_name.rs @@ -5,7 +5,13 @@ use core::str::FromStr; use crate::account::storage::slot::StorageSlotId; use crate::errors::StorageSlotNameError; -use crate::utils::serde::{ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// The name of an account storage slot. /// @@ -100,7 +106,7 @@ impl StorageSlotName { /// We must check the validity of a slot name against the raw bytes of the UTF-8 string because /// typical character APIs are not available in a const version. We can do this because any byte /// in a UTF-8 string that is an ASCII character never represents anything other than such a - /// character, even though UTF-8 can contain multibyte sequences: + /// character, even though UTF-8 can contain multi-byte sequences: /// /// > UTF-8, the object of this memo, has a one-octet encoding unit. It uses all bits of an /// > octet, but has the quality of preserving the full US-ASCII range: US-ASCII characters @@ -245,11 +251,9 @@ impl Serializable for StorageSlotName { } impl Deserializable for StorageSlotName { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let len = source.read_u8()?; - let name = source.read_many(len as usize)?; + let name = source.read_many_iter(len as usize)?.collect::>()?; String::from_utf8(name) .map_err(|err| DeserializationError::InvalidValue(err.to_string())) .and_then(|name| { diff --git a/crates/miden-protocol/src/account/storage/slot/storage_slot.rs b/crates/miden-protocol/src/account/storage/slot/storage_slot.rs index 9fce734847..37da4c86a9 100644 --- a/crates/miden-protocol/src/account/storage/slot/storage_slot.rs +++ b/crates/miden-protocol/src/account/storage/slot/storage_slot.rs @@ -1,6 +1,13 @@ use crate::Word; use crate::account::storage::slot::StorageSlotId; use crate::account::{StorageMap, 
StorageSlotContent, StorageSlotName, StorageSlotType}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// An individual storage slot in [`AccountStorage`](crate::account::AccountStorage). /// @@ -101,23 +108,11 @@ impl StorageSlot { } } -impl Ord for StorageSlot { - fn cmp(&self, other: &Self) -> core::cmp::Ordering { - self.name().cmp(&other.name) - } -} - -impl PartialOrd for StorageSlot { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - // SERIALIZATION // ================================================================================================ -impl crate::utils::serde::Serializable for StorageSlot { - fn write_into(&self, target: &mut W) { +impl Serializable for StorageSlot { + fn write_into(&self, target: &mut W) { target.write(&self.name); target.write(&self.content); } @@ -127,10 +122,8 @@ impl crate::utils::serde::Serializable for StorageSlot { } } -impl crate::utils::serde::Deserializable for StorageSlot { - fn read_from( - source: &mut R, - ) -> Result { +impl Deserializable for StorageSlot { + fn read_from(source: &mut R) -> Result { let name: StorageSlotName = source.read()?; let content: StorageSlotContent = source.read()?; diff --git a/crates/miden-protocol/src/account/storage/slot/type.rs b/crates/miden-protocol/src/account/storage/slot/type.rs index 25bd5a3a60..baa12f204c 100644 --- a/crates/miden-protocol/src/account/storage/slot/type.rs +++ b/crates/miden-protocol/src/account/storage/slot/type.rs @@ -114,10 +114,9 @@ impl Deserializable for StorageSlotType { #[cfg(test)] mod tests { - use miden_core::utils::{Deserializable, Serializable}; - + use crate::Felt; use crate::account::StorageSlotType; - use crate::{Felt, FieldElement}; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn test_serde_account_storage_slot_type() { diff --git a/crates/miden-protocol/src/address/address_id.rs 
b/crates/miden-protocol/src/address/address_id.rs index 9cad626bfb..70c8eb14df 100644 --- a/crates/miden-protocol/src/address/address_id.rs +++ b/crates/miden-protocol/src/address/address_id.rs @@ -2,12 +2,17 @@ use alloc::string::ToString; use bech32::Bech32m; use bech32::primitives::decode::CheckedHrpstring; -use miden_processor::DeserializationError; use crate::account::AccountId; use crate::address::{AddressType, NetworkId}; use crate::errors::{AddressError, Bech32Error}; -use crate::utils::serde::{ByteWriter, Deserializable, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// The identifier of an [`Address`](super::Address). /// @@ -82,9 +87,7 @@ impl Serializable for AddressId { } impl Deserializable for AddressId { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let address_type: u8 = source.read_u8()?; let address_type = AddressType::try_from(address_type) .map_err(|err| DeserializationError::InvalidValue(err.to_string()))?; diff --git a/crates/miden-protocol/src/address/mod.rs b/crates/miden-protocol/src/address/mod.rs index 06e3ceba6a..8a99b8eb0d 100644 --- a/crates/miden-protocol/src/address/mod.rs +++ b/crates/miden-protocol/src/address/mod.rs @@ -12,13 +12,18 @@ mod network_id; use alloc::string::String; pub use interface::AddressInterface; -use miden_processor::DeserializationError; pub use network_id::{CustomNetworkId, NetworkId}; use crate::crypto::ies::SealingKey; use crate::errors::AddressError; use crate::note::NoteTag; -use crate::utils::serde::{ByteWriter, Deserializable, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; mod address_id; pub use address_id::AddressId; @@ -184,9 +189,7 @@ impl Serializable for Address { } impl Deserializable for Address { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> 
Result { let identifier: AddressId = source.read()?; let routing_params: Option = source.read()?; diff --git a/crates/miden-protocol/src/address/routing_parameters.rs b/crates/miden-protocol/src/address/routing_parameters.rs index c10d8792d2..ed0a45fe8a 100644 --- a/crates/miden-protocol/src/address/routing_parameters.rs +++ b/crates/miden-protocol/src/address/routing_parameters.rs @@ -55,8 +55,8 @@ const K256_PUBLIC_KEY_LENGTH: usize = 33; /// Discriminants for encryption key variants. const ENCRYPTION_KEY_X25519_XCHACHA20POLY1305: u8 = 0; const ENCRYPTION_KEY_K256_XCHACHA20POLY1305: u8 = 1; -const ENCRYPTION_KEY_X25519_AEAD_RPO: u8 = 2; -const ENCRYPTION_KEY_K256_AEAD_RPO: u8 = 3; +const ENCRYPTION_KEY_X25519_AEAD_POSEIDON2: u8 = 2; +const ENCRYPTION_KEY_K256_AEAD_POSEIDON2: u8 = 3; /// Parameters that define how a sender should route a note to the [`AddressId`](super::AddressId) /// in an [`Address`](super::Address). @@ -252,7 +252,8 @@ impl Serializable for RoutingParameters { impl Deserializable for RoutingParameters { fn read_from(source: &mut R) -> Result { let num_bytes = source.read_u16()?; - let bytes: Vec = source.read_many(num_bytes as usize)?; + let bytes: Vec = + source.read_many_iter(num_bytes as usize)?.collect::>()?; Self::decode_from_bytes(bytes.into_iter()) .map_err(|err| DeserializationError::InvalidValue(err.to_string())) @@ -316,12 +317,12 @@ fn encode_encryption_key(key: &SealingKey, encoded: &mut Vec) { encoded.push(ENCRYPTION_KEY_K256_XCHACHA20POLY1305); encoded.extend(&pk.to_bytes()); }, - SealingKey::X25519AeadRpo(pk) => { - encoded.push(ENCRYPTION_KEY_X25519_AEAD_RPO); + SealingKey::X25519AeadPoseidon2(pk) => { + encoded.push(ENCRYPTION_KEY_X25519_AEAD_POSEIDON2); encoded.extend(&pk.to_bytes()); }, - SealingKey::K256AeadRpo(pk) => { - encoded.push(ENCRYPTION_KEY_K256_AEAD_RPO); + SealingKey::K256AeadPoseidon2(pk) => { + encoded.push(ENCRYPTION_KEY_K256_AEAD_POSEIDON2); encoded.extend(&pk.to_bytes()); }, } @@ -346,10 +347,12 @@ fn 
decode_encryption_key( ENCRYPTION_KEY_K256_XCHACHA20POLY1305 => { SealingKey::K256XChaCha20Poly1305(read_k256_pub_key(byte_iter)?) }, - ENCRYPTION_KEY_X25519_AEAD_RPO => { - SealingKey::X25519AeadRpo(read_x25519_pub_key(byte_iter)?) + ENCRYPTION_KEY_X25519_AEAD_POSEIDON2 => { + SealingKey::X25519AeadPoseidon2(read_x25519_pub_key(byte_iter)?) + }, + ENCRYPTION_KEY_K256_AEAD_POSEIDON2 => { + SealingKey::K256AeadPoseidon2(read_k256_pub_key(byte_iter)?) }, - ENCRYPTION_KEY_K256_AEAD_RPO => SealingKey::K256AeadRpo(read_k256_pub_key(byte_iter)?), other => { return Err(AddressError::decode_error(format!( "unknown encryption key variant: {}", @@ -554,21 +557,21 @@ mod tests { test_encryption_key_roundtrip(encryption_key)?; } - // Test X25519AeadRpo + // Test X25519AeadPoseidon2 { use crate::crypto::dsa::eddsa_25519_sha512::SecretKey; let secret_key = SecretKey::with_rng(&mut rand::rng()); let public_key = secret_key.public_key(); - let encryption_key = SealingKey::X25519AeadRpo(public_key); + let encryption_key = SealingKey::X25519AeadPoseidon2(public_key); test_encryption_key_roundtrip(encryption_key)?; } - // Test K256AeadRpo + // Test K256AeadPoseidon2 { use crate::crypto::dsa::ecdsa_k256_keccak::SecretKey; let secret_key = SecretKey::with_rng(&mut rand::rng()); let public_key = secret_key.public_key(); - let encryption_key = SealingKey::K256AeadRpo(public_key); + let encryption_key = SealingKey::K256AeadPoseidon2(public_key); test_encryption_key_roundtrip(encryption_key)?; } diff --git a/crates/miden-protocol/src/asset/asset_amount.rs b/crates/miden-protocol/src/asset/asset_amount.rs new file mode 100644 index 0000000000..437d5a6977 --- /dev/null +++ b/crates/miden-protocol/src/asset/asset_amount.rs @@ -0,0 +1,200 @@ +use alloc::string::ToString; +use core::fmt; + +use super::AssetError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; + +// ASSET AMOUNT +// 
================================================================================================ + +/// A validated amount for a [`super::FungibleAsset`]. +/// +/// The inner value is guaranteed to be at most [`AssetAmount::MAX`]. +#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct AssetAmount(u64); + +impl AssetAmount { + /// The maximum amount a fungible asset can represent. + /// + /// This number was chosen so that it can be represented as a positive and negative number in a + /// field element. See `account_delta.masm` for more details on how this number was chosen. + pub const MAX: Self = Self(2u64.pow(63) - 2u64.pow(31)); + + /// An amount of zero. + pub const ZERO: Self = Self(0); + + /// Creates a new [`AssetAmount`] after validating that `amount` does not exceed + /// [`Self::MAX`]. + /// + /// # Errors + /// + /// Returns [`AssetError::FungibleAssetAmountTooBig`] if `amount` exceeds [`Self::MAX`]. + pub fn new(amount: u64) -> Result { + if amount > Self::MAX.0 { + return Err(AssetError::FungibleAssetAmountTooBig(amount)); + } + Ok(Self(amount)) + } + + /// Returns the inner `u64` value. + pub const fn inner(&self) -> u64 { + self.0 + } + + /// Creates a new [`AssetAmount`] without validating bounds. + /// + /// # Safety + /// + /// The caller must ensure that `amount <= AssetAmount::MAX`. 
+ pub(crate) const fn new_unchecked(amount: u64) -> Self { + Self(amount) + } +} + +// TRAIT IMPLEMENTATIONS +// ================================================================================================ + +impl From for AssetAmount { + fn from(amount: u8) -> Self { + Self(amount as u64) + } +} + +impl From for AssetAmount { + fn from(amount: u16) -> Self { + Self(amount as u64) + } +} + +impl From for AssetAmount { + fn from(amount: u32) -> Self { + Self(amount as u64) + } +} + +impl TryFrom for AssetAmount { + type Error = AssetError; + + fn try_from(amount: u64) -> Result { + Self::new(amount) + } +} + +impl From for u64 { + fn from(amount: AssetAmount) -> Self { + amount.0 + } +} + +impl fmt::Display for AssetAmount { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +// SERIALIZATION +// ================================================================================================ + +impl Serializable for AssetAmount { + fn write_into(&self, target: &mut W) { + target.write(self.0); + } + + fn get_size_hint(&self) -> usize { + self.0.get_size_hint() + } +} + +impl Deserializable for AssetAmount { + fn read_from(source: &mut R) -> Result { + let amount: u64 = source.read()?; + Self::new(amount).map_err(|err| DeserializationError::InvalidValue(err.to_string())) + } +} + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn asset_amount_max_value() { + let max = AssetAmount::MAX; + assert_eq!(max.inner(), 2u64.pow(63) - 2u64.pow(31)); + } + + #[test] + fn asset_amount_new_valid() { + assert!(AssetAmount::new(0).is_ok()); + assert!(AssetAmount::new(100).is_ok()); + assert!(AssetAmount::new(AssetAmount::MAX.inner()).is_ok()); + } + + #[test] + fn asset_amount_new_exceeds_max() { + assert!(AssetAmount::new(AssetAmount::MAX.inner() + 1).is_err()); + assert!(AssetAmount::new(u64::MAX).is_err()); + 
} + + #[test] + fn asset_amount_from_small_types() { + let a: AssetAmount = 42u8.into(); + assert_eq!(a.inner(), 42); + + let b: AssetAmount = 1000u16.into(); + assert_eq!(b.inner(), 1000); + + let c: AssetAmount = 1_000_000u32.into(); + assert_eq!(c.inner(), 1_000_000); + } + + #[test] + fn asset_amount_try_from_u64() { + assert!(AssetAmount::try_from(100u64).is_ok()); + assert!(AssetAmount::try_from(AssetAmount::MAX.inner() + 1).is_err()); + } + + #[test] + fn asset_amount_into_u64() { + let amount = AssetAmount::new(42).unwrap(); + let val: u64 = amount.into(); + assert_eq!(val, 42); + } + + #[test] + fn asset_amount_display() { + let amount = AssetAmount::new(12345).unwrap(); + assert_eq!(format!("{amount}"), "12345"); + } + + #[test] + fn asset_amount_ordering() { + let a = AssetAmount::new(10).unwrap(); + let b = AssetAmount::new(20).unwrap(); + assert!(a < b); + assert!(b > a); + assert_eq!(a, AssetAmount::new(10).unwrap()); + } + + #[test] + fn asset_amount_default_is_zero() { + assert_eq!(AssetAmount::default(), AssetAmount::ZERO); + assert_eq!(AssetAmount::default().inner(), 0); + } + + #[test] + fn asset_amount_serde_roundtrip() { + let amount = AssetAmount::new(999).unwrap(); + let bytes = amount.to_bytes(); + let restored = AssetAmount::read_from_bytes(&bytes).unwrap(); + assert_eq!(amount, restored); + } +} diff --git a/crates/miden-protocol/src/asset/asset_callbacks.rs b/crates/miden-protocol/src/asset/asset_callbacks.rs new file mode 100644 index 0000000000..3edf662f2f --- /dev/null +++ b/crates/miden-protocol/src/asset/asset_callbacks.rs @@ -0,0 +1,105 @@ +use alloc::vec::Vec; + +use crate::Word; +use crate::account::{StorageSlot, StorageSlotName}; +use crate::utils::sync::LazyLock; + +// CONSTANTS +// ================================================================================================ + +static ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_SLOT_NAME: LazyLock = + LazyLock::new(|| { + 
StorageSlotName::new("miden::protocol::faucet::callback::on_before_asset_added_to_account") + .expect("storage slot name should be valid") + }); + +static ON_BEFORE_ASSET_ADDED_TO_NOTE_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::protocol::faucet::callback::on_before_asset_added_to_note") + .expect("storage slot name should be valid") +}); + +// ASSET CALLBACKS +// ================================================================================================ + +/// Configures the callback procedure roots for asset callbacks. +/// +/// ## Storage Layout +/// +/// - [`Self::on_before_asset_added_to_account_slot()`]: Stores the procedure root of the +/// `on_before_asset_added_to_account` callback. This storage slot is only added if the callback +/// procedure root is not the empty word. +/// - [`Self::on_before_asset_added_to_note_slot()`]: Stores the procedure root of the +/// `on_before_asset_added_to_note` callback. This storage slot is only added if the callback +/// procedure root is not the empty word. +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct AssetCallbacks { + on_before_asset_added_to_account: Word, + on_before_asset_added_to_note: Word, +} + +impl AssetCallbacks { + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a new [`AssetCallbacks`] with all callbacks set to the empty word. + pub fn new() -> Self { + Self::default() + } + + /// Sets the `on_before_asset_added_to_account` callback procedure root. + pub fn on_before_asset_added_to_account(mut self, proc_root: Word) -> Self { + self.on_before_asset_added_to_account = proc_root; + self + } + + /// Sets the `on_before_asset_added_to_note` callback procedure root. 
+ pub fn on_before_asset_added_to_note(mut self, proc_root: Word) -> Self { + self.on_before_asset_added_to_note = proc_root; + self + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the [`StorageSlotName`] where the `on_before_asset_added_to_account` callback + /// procedure root is stored. + pub fn on_before_asset_added_to_account_slot() -> &'static StorageSlotName { + &ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_SLOT_NAME + } + + /// Returns the [`StorageSlotName`] where the `on_before_asset_added_to_note` callback + /// procedure root is stored. + pub fn on_before_asset_added_to_note_slot() -> &'static StorageSlotName { + &ON_BEFORE_ASSET_ADDED_TO_NOTE_SLOT_NAME + } + + /// Returns the procedure root of the `on_before_asset_added_to_account` callback. + pub fn on_before_asset_added_to_account_proc_root(&self) -> Word { + self.on_before_asset_added_to_account + } + + /// Returns the procedure root of the `on_before_asset_added_to_note` callback. 
+ pub fn on_before_asset_added_to_note_proc_root(&self) -> Word { + self.on_before_asset_added_to_note + } + + pub fn into_storage_slots(self) -> Vec { + let mut slots = Vec::new(); + + if !self.on_before_asset_added_to_account.is_empty() { + slots.push(StorageSlot::with_value( + AssetCallbacks::on_before_asset_added_to_account_slot().clone(), + self.on_before_asset_added_to_account, + )); + } + + if !self.on_before_asset_added_to_note.is_empty() { + slots.push(StorageSlot::with_value( + AssetCallbacks::on_before_asset_added_to_note_slot().clone(), + self.on_before_asset_added_to_note, + )); + } + + slots + } +} diff --git a/crates/miden-protocol/src/asset/asset_callbacks_flag.rs b/crates/miden-protocol/src/asset/asset_callbacks_flag.rs new file mode 100644 index 0000000000..c5dfa620e4 --- /dev/null +++ b/crates/miden-protocol/src/asset/asset_callbacks_flag.rs @@ -0,0 +1,68 @@ +use alloc::string::ToString; + +use crate::errors::AssetError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; + +/// The flag in an [`AssetVaultKey`](super::AssetVaultKey) that indicates whether +/// [`AssetCallbacks`](super::AssetCallbacks) are enabled for this asset. +#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] +#[repr(u8)] +pub enum AssetCallbackFlag { + #[default] + Disabled = Self::DISABLED, + + Enabled = Self::ENABLED, +} + +impl AssetCallbackFlag { + const DISABLED: u8 = 0; + const ENABLED: u8 = 1; + + /// The serialized size of an [`AssetCallbackFlag`] in bytes. + pub const SERIALIZED_SIZE: usize = core::mem::size_of::(); + + /// Encodes the callbacks setting as a `u8`. + pub const fn as_u8(&self) -> u8 { + *self as u8 + } +} + +impl TryFrom for AssetCallbackFlag { + type Error = AssetError; + + /// Decodes a callbacks setting from a `u8`. + /// + /// # Errors + /// + /// Returns an error if the value is not a valid callbacks encoding. 
+ fn try_from(value: u8) -> Result { + match value { + Self::DISABLED => Ok(Self::Disabled), + Self::ENABLED => Ok(Self::Enabled), + _ => Err(AssetError::InvalidAssetCallbackFlag(value)), + } + } +} + +impl Serializable for AssetCallbackFlag { + fn write_into(&self, target: &mut W) { + target.write_u8(self.as_u8()); + } + + fn get_size_hint(&self) -> usize { + AssetCallbackFlag::SERIALIZED_SIZE + } +} + +impl Deserializable for AssetCallbackFlag { + fn read_from(source: &mut R) -> Result { + Self::try_from(source.read_u8()?) + .map_err(|err| DeserializationError::InvalidValue(err.to_string())) + } +} diff --git a/crates/miden-protocol/src/asset/fungible.rs b/crates/miden-protocol/src/asset/fungible.rs index 95e7dfdcb6..186722be61 100644 --- a/crates/miden-protocol/src/asset/fungible.rs +++ b/crates/miden-protocol/src/asset/fungible.rs @@ -1,10 +1,11 @@ -use alloc::boxed::Box; use alloc::string::ToString; use core::fmt; use super::vault::AssetVaultKey; -use super::{AccountType, Asset, AssetError, Felt, Word, ZERO, is_not_a_non_fungible_asset}; -use crate::account::{AccountId, AccountIdPrefix}; +use super::{AccountType, Asset, AssetAmount, AssetCallbackFlag, AssetError, Word}; +use crate::Felt; +use crate::account::AccountId; +use crate::asset::AssetId; use crate::utils::serde::{ ByteReader, ByteWriter, @@ -19,10 +20,14 @@ use crate::utils::serde::{ /// /// A fungible asset consists of a faucet ID of the faucet which issued the asset as well as the /// asset amount. Asset amount is guaranteed to be 2^63 - 1 or smaller. +/// +/// The fungible asset can have callbacks to the faucet enabled or disabled, depending on +/// [`AssetCallbackFlag`]. See [`AssetCallbacks`](crate::asset::AssetCallbacks) for more details. 
#[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct FungibleAsset { faucet_id: AccountId, - amount: u64, + amount: AssetAmount, + callbacks: AssetCallbackFlag, } impl FungibleAsset { @@ -30,34 +35,83 @@ impl FungibleAsset { // -------------------------------------------------------------------------------------------- /// Specifies the maximum amount a fungible asset can represent. /// - /// This number was chosen so that it can be represented as a positive and negative number in a - /// field element. See `account_delta.masm` for more details on how this number was chosen. - pub const MAX_AMOUNT: u64 = 2u64.pow(63) - 2u64.pow(31); + /// Use [`AssetAmount::MAX`] instead for the validated wrapper type. + pub const MAX_AMOUNT: u64 = AssetAmount::MAX.inner(); /// The serialized size of a [`FungibleAsset`] in bytes. /// - /// Currently an account ID (15 bytes) plus an amount (u64). - pub const SERIALIZED_SIZE: usize = AccountId::SERIALIZED_SIZE + core::mem::size_of::(); + /// An account ID (15 bytes) plus an amount (u64) plus a callbacks flag (u8). + pub const SERIALIZED_SIZE: usize = AccountId::SERIALIZED_SIZE + + core::mem::size_of::() + + AssetCallbackFlag::SERIALIZED_SIZE; // CONSTRUCTOR // -------------------------------------------------------------------------------------------- + /// Returns a fungible asset instantiated with the provided faucet ID and amount. /// /// # Errors + /// /// Returns an error if: - /// - The faucet_id is not a valid fungible faucet ID. - /// - The provided amount is greater than 2^63 - 1. - pub const fn new(faucet_id: AccountId, amount: u64) -> Result { - let asset = Self { faucet_id, amount }; - asset.validate() + /// - The faucet ID is not a valid fungible faucet ID. + /// - The provided amount is greater than [`AssetAmount::MAX`]. 
+ pub fn new(faucet_id: AccountId, amount: u64) -> Result { + if !matches!(faucet_id.account_type(), AccountType::FungibleFaucet) { + return Err(AssetError::FungibleFaucetIdTypeMismatch(faucet_id)); + } + + let amount = AssetAmount::new(amount)?; + + Ok(Self { + faucet_id, + amount, + callbacks: AssetCallbackFlag::default(), + }) } - /// Creates a new [FungibleAsset] without checking its validity. - pub(crate) fn new_unchecked(value: Word) -> FungibleAsset { - FungibleAsset { - faucet_id: AccountId::new_unchecked([value[3], value[2]]), - amount: value[0].as_int(), + /// Creates a fungible asset from the provided key and value. + /// + /// # Errors + /// + /// Returns an error if: + /// - The provided key does not contain a valid faucet ID. + /// - The provided key's asset ID limbs are not zero. + /// - The faucet ID is not a fungible faucet ID. + /// - The provided value's amount is greater than [`FungibleAsset::MAX_AMOUNT`] or its three + /// most significant elements are not zero. + pub fn from_key_value(key: AssetVaultKey, value: Word) -> Result { + if !key.asset_id().is_empty() { + return Err(AssetError::FungibleAssetIdMustBeZero(key.asset_id())); + } + + if value[1] != Felt::ZERO || value[2] != Felt::ZERO || value[3] != Felt::ZERO { + return Err(AssetError::FungibleAssetValueMostSignificantElementsMustBeZero(value)); } + + let mut asset = Self::new(key.faucet_id(), value[0].as_canonical_u64())?; + asset.callbacks = key.callback_flag(); + + Ok(asset) + } + + /// Creates a fungible asset from the provided key and value. + /// + /// Prefer [`Self::from_key_value`] for more type safety. + /// + /// # Errors + /// + /// Returns an error if: + /// - The provided key does not contain a valid faucet ID. + /// - [`Self::from_key_value`] fails. 
+ pub fn from_key_value_words(key: Word, value: Word) -> Result { + let vault_key = AssetVaultKey::try_from(key)?; + Self::from_key_value(vault_key, value) + } + + /// Returns a copy of this asset with the given [`AssetCallbackFlag`]. + pub fn with_callbacks(mut self, callbacks: AssetCallbackFlag) -> Self { + self.callbacks = callbacks; + self } // PUBLIC ACCESSORS @@ -68,27 +122,43 @@ impl FungibleAsset { self.faucet_id } - /// Return ID prefix of the faucet which issued this asset. - pub fn faucet_id_prefix(&self) -> AccountIdPrefix { - self.faucet_id.prefix() - } - /// Returns the amount of this asset. - pub fn amount(&self) -> u64 { + pub fn amount(&self) -> AssetAmount { self.amount } - /// Returns true if this and the other assets were issued from the same faucet. - pub fn is_from_same_faucet(&self, other: &Self) -> bool { - self.faucet_id == other.faucet_id + /// Returns true if this and the other asset were issued from the same faucet. + pub fn is_same(&self, other: &Self) -> bool { + self.vault_key() == other.vault_key() + } + + /// Returns the [`AssetCallbackFlag`] of this asset. + pub fn callbacks(&self) -> AssetCallbackFlag { + self.callbacks } /// Returns the key which is used to store this asset in the account vault. pub fn vault_key(&self) -> AssetVaultKey { - AssetVaultKey::from_account_id(self.faucet_id) + AssetVaultKey::new(AssetId::default(), self.faucet_id, self.callbacks) .expect("faucet ID should be of type fungible") } + /// Returns the asset's key encoded to a [`Word`]. + pub fn to_key_word(&self) -> Word { + self.vault_key().to_word() + } + + /// Returns the asset's value encoded to a [`Word`]. 
+ pub fn to_value_word(&self) -> Word { + Word::new([ + Felt::try_from(self.amount.inner()) + .expect("fungible asset should only allow amounts that fit into a felt"), + Felt::ZERO, + Felt::ZERO, + Felt::ZERO, + ]) + } + // OPERATIONS // -------------------------------------------------------------------------------------------- @@ -96,83 +166,60 @@ impl FungibleAsset { /// /// # Errors /// Returns an error if: - /// - The assets were not issued by the same faucet. + /// - The assets do not have the same vault key (i.e. different faucet or callback flags). /// - The total value of assets is greater than or equal to 2^63. #[allow(clippy::should_implement_trait)] pub fn add(self, other: Self) -> Result { - if self.faucet_id != other.faucet_id { - return Err(AssetError::FungibleAssetInconsistentFaucetIds { - original_issuer: self.faucet_id, - other_issuer: other.faucet_id, + if !self.is_same(&other) { + return Err(AssetError::FungibleAssetInconsistentVaultKeys { + original_key: self.vault_key(), + other_key: other.vault_key(), }); } - let amount = self + let raw_amount = self .amount - .checked_add(other.amount) + .inner() + .checked_add(other.amount.inner()) .expect("even MAX_AMOUNT + MAX_AMOUNT should not overflow u64"); - if amount > Self::MAX_AMOUNT { - return Err(AssetError::FungibleAssetAmountTooBig(amount)); - } + let amount = AssetAmount::new(raw_amount)?; - Ok(Self { faucet_id: self.faucet_id, amount }) + Ok(Self { + faucet_id: self.faucet_id, + amount, + callbacks: self.callbacks, + }) } /// Subtracts a fungible asset from another and returns the result. /// /// # Errors /// Returns an error if: - /// - The assets were not issued by the same faucet. + /// - The assets do not have the same vault key (i.e. different faucet or callback flags). /// - The final amount would be negative. 
#[allow(clippy::should_implement_trait)] pub fn sub(self, other: Self) -> Result { - if self.faucet_id != other.faucet_id { - return Err(AssetError::FungibleAssetInconsistentFaucetIds { - original_issuer: self.faucet_id, - other_issuer: other.faucet_id, + if !self.is_same(&other) { + return Err(AssetError::FungibleAssetInconsistentVaultKeys { + original_key: self.vault_key(), + other_key: other.vault_key(), }); } - let amount = self.amount.checked_sub(other.amount).ok_or( + let raw_amount = self.amount.inner().checked_sub(other.amount.inner()).ok_or( AssetError::FungibleAssetAmountNotSufficient { - minuend: self.amount, - subtrahend: other.amount, + minuend: self.amount.inner(), + subtrahend: other.amount.inner(), }, )?; - - Ok(FungibleAsset { faucet_id: self.faucet_id, amount }) - } - - // HELPER FUNCTIONS - // -------------------------------------------------------------------------------------------- - - /// Validates this fungible asset. - /// # Errors - /// Returns an error if: - /// - The faucet_id is not a valid fungible faucet ID. - /// - The provided amount is greater than 2^63 - 1. - const fn validate(self) -> Result { - let account_type = self.faucet_id.account_type(); - if !matches!(account_type, AccountType::FungibleFaucet) { - return Err(AssetError::FungibleFaucetIdTypeMismatch(self.faucet_id)); - } - - if self.amount > Self::MAX_AMOUNT { - return Err(AssetError::FungibleAssetAmountTooBig(self.amount)); - } - - Ok(self) - } -} - -impl From for Word { - fn from(asset: FungibleAsset) -> Self { - let mut result = Word::empty(); - result[0] = Felt::new(asset.amount); - result[2] = asset.faucet_id.suffix(); - result[3] = asset.faucet_id.prefix().as_felt(); - debug_assert!(is_not_a_non_fungible_asset(result)); - result + // SAFETY: subtraction of two valid amounts always produces a valid amount. 
+ let amount = AssetAmount::new_unchecked(raw_amount); + + Ok(FungibleAsset { + faucet_id: self.faucet_id, + amount, + callbacks: self.callbacks, + }) } } @@ -182,22 +229,9 @@ impl From for Asset { } } -impl TryFrom for FungibleAsset { - type Error = AssetError; - - fn try_from(value: Word) -> Result { - if value[1] != ZERO { - return Err(AssetError::FungibleAssetExpectedZero(value)); - } - let faucet_id = AccountId::try_from([value[3], value[2]]) - .map_err(|err| AssetError::InvalidFaucetAccountId(Box::new(err)))?; - let amount = value[0].as_int(); - Self::new(faucet_id, amount) - } -} - impl fmt::Display for FungibleAsset { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // TODO: Replace with hex representation? write!(f, "{self:?}") } } @@ -211,42 +245,38 @@ impl Serializable for FungibleAsset { // distinguishable during deserialization. target.write(self.faucet_id); target.write(self.amount); + target.write(self.callbacks); } fn get_size_hint(&self) -> usize { - self.faucet_id.get_size_hint() + self.amount.get_size_hint() + self.faucet_id.get_size_hint() + + self.amount.get_size_hint() + + self.callbacks.get_size_hint() } } impl Deserializable for FungibleAsset { fn read_from(source: &mut R) -> Result { - let faucet_id_prefix: AccountIdPrefix = source.read()?; - FungibleAsset::deserialize_with_faucet_id_prefix(faucet_id_prefix, source) + let faucet_id: AccountId = source.read()?; + FungibleAsset::deserialize_with_faucet_id(faucet_id, source) } } impl FungibleAsset { - /// Deserializes a [`FungibleAsset`] from an [`AccountIdPrefix`] and the remaining data from the - /// given `source`. - pub(super) fn deserialize_with_faucet_id_prefix( - faucet_id_prefix: AccountIdPrefix, + /// Deserializes a [`FungibleAsset`] from an [`AccountId`] and the remaining data from the given + /// `source`. 
+    pub(super) fn deserialize_with_faucet_id(
+        faucet_id: AccountId,
         source: &mut R,
     ) -> Result {
-        // The 8 bytes of the prefix have already been read, so we only need to read the remaining 7
-        // bytes of the account ID's 15 total bytes.
-        let suffix_bytes: [u8; 7] = source.read()?;
-        // Convert prefix back to bytes so we can call the TryFrom<[u8; 15]> impl.
-        let prefix_bytes: [u8; 8] = faucet_id_prefix.into();
-        let mut id_bytes: [u8; 15] = [0; 15];
-        id_bytes[..8].copy_from_slice(&prefix_bytes);
-        id_bytes[8..].copy_from_slice(&suffix_bytes);
-
-        let faucet_id = AccountId::try_from(id_bytes)
-            .map_err(|err| DeserializationError::InvalidValue(err.to_string()))?;
-        let amount: u64 = source.read()?;
-        FungibleAsset::new(faucet_id, amount)
-            .map_err(|err| DeserializationError::InvalidValue(err.to_string()))
+        let amount = source.read()?;
+        let callbacks = source.read()?;
+        let asset = FungibleAsset::new(faucet_id, amount)
+            .map_err(|err| DeserializationError::InvalidValue(err.to_string()))?
+            .with_callbacks(callbacks);
+
+        Ok(asset)
     }
 }
 
@@ -255,6 +285,8 @@
 
 #[cfg(test)]
 mod tests {
+    use assert_matches::assert_matches;
+
     use super::*;
     use crate::account::AccountId;
     use crate::testing::account_id::{
@@ -267,7 +299,39 @@
     };
 
     #[test]
-    fn test_fungible_asset_serde() {
+    fn fungible_asset_from_key_value_words_fails_on_invalid_asset_id() -> anyhow::Result<()> {
+        let faucet_id: AccountId = ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET.try_into()?;
+        let invalid_key = Word::from([
+            Felt::from(1u32),
+            Felt::from(2u32),
+            faucet_id.suffix(),
+            faucet_id.prefix().as_felt(),
+        ]);
+
+        let err = FungibleAsset::from_key_value_words(
+            invalid_key,
+            FungibleAsset::mock(5).to_value_word(),
+        )
+        .unwrap_err();
+        assert_matches!(err, AssetError::FungibleAssetIdMustBeZero(_));
+
+        Ok(())
+    }
+
+    #[test]
+    fn fungible_asset_from_key_value_fails_on_invalid_value() -> anyhow::Result<()> {
+        let asset = FungibleAsset::mock(42);
+        let mut invalid_value = asset.to_value_word();
+
invalid_value[2] = Felt::from(5u32); + + let err = FungibleAsset::from_key_value(asset.vault_key(), invalid_value).unwrap_err(); + assert_matches!(err, AssetError::FungibleAssetValueMostSignificantElementsMustBeZero(_)); + + Ok(()) + } + + #[test] + fn test_fungible_asset_serde() -> anyhow::Result<()> { for fungible_account_id in [ ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, @@ -281,6 +345,15 @@ mod tests { fungible_asset, FungibleAsset::read_from_bytes(&fungible_asset.to_bytes()).unwrap() ); + assert_eq!(fungible_asset.to_bytes().len(), fungible_asset.get_size_hint()); + + assert_eq!( + fungible_asset, + FungibleAsset::from_key_value_words( + fungible_asset.to_key_word(), + fungible_asset.to_value_word() + )? + ) } let account_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_3).unwrap(); @@ -296,5 +369,16 @@ mod tests { asset_bytes[0..15].copy_from_slice(&non_fungible_faucet_id.to_bytes()); let err = FungibleAsset::read_from_bytes(&asset_bytes).unwrap_err(); assert!(matches!(err, DeserializationError::InvalidValue(_))); + + Ok(()) + } + + #[test] + fn test_vault_key_for_fungible_asset() { + let asset = FungibleAsset::mock(34); + + assert_eq!(asset.vault_key().faucet_id(), FungibleAsset::mock_issuer()); + assert_eq!(asset.vault_key().asset_id().prefix().as_canonical_u64(), 0); + assert_eq!(asset.vault_key().asset_id().suffix().as_canonical_u64(), 0); } } diff --git a/crates/miden-protocol/src/asset/mod.rs b/crates/miden-protocol/src/asset/mod.rs index 4d14998289..3f6cde41f1 100644 --- a/crates/miden-protocol/src/asset/mod.rs +++ b/crates/miden-protocol/src/asset/mod.rs @@ -7,11 +7,13 @@ use super::utils::serde::{ DeserializationError, Serializable, }; -use super::{Felt, Hasher, Word, ZERO}; -use crate::account::AccountIdPrefix; +use super::{Felt, Word}; +use crate::account::AccountId; + +mod asset_amount; +pub use asset_amount::AssetAmount; mod fungible; -use alloc::boxed::Box; pub use fungible::FungibleAsset; @@ -22,70 +24,72 
@@ pub use nonfungible::{NonFungibleAsset, NonFungibleAssetDetails};
 mod token_symbol;
 pub use token_symbol::TokenSymbol;
 
+mod asset_callbacks;
+pub use asset_callbacks::AssetCallbacks;
+
+mod asset_callbacks_flag;
+pub use asset_callbacks_flag::AssetCallbackFlag;
+
 mod vault;
-pub use vault::{AssetVault, AssetVaultKey, AssetWitness, PartialVault};
+pub use vault::{AssetId, AssetVault, AssetVaultKey, AssetWitness, PartialVault};
 
 // ASSET
 // ================================================================================================
 
 /// A fungible or a non-fungible asset.
 ///
-/// All assets are encoded using a single word (4 elements) such that it is easy to determine the
-/// type of an asset both inside and outside Miden VM. Specifically:
-///
-/// Element 1 of the asset will be:
-/// - ZERO for a fungible asset.
-/// - non-ZERO for a non-fungible asset.
-///
-/// Element 3 of both asset types is an [`AccountIdPrefix`] or equivalently, the prefix of an
-/// [`AccountId`](crate::account::AccountId), which can be used to distinguish assets
-/// based on [`AccountIdPrefix::account_type`].
+/// All assets are encoded as the vault key of the asset and its value, each represented as one word
+/// (4 elements). This makes it easy to determine the type of an asset both inside and outside
+/// Miden VM. Specifically:
 ///
-/// For element 3 of the vault keys of assets, the bit at index 5 (referred to as the
-/// "fungible bit" will be):
-/// - `1` for a fungible asset.
-/// - `0` for a non-fungible asset.
+/// The vault key of an asset contains the [`AccountId`] of the faucet that issues the asset. It can
+/// be used to distinguish assets based on the encoded [`AccountId::account_type`]. In the vault
+/// keys of assets, the account type bits at index 4 and 5 determine whether the asset is fungible
+/// or non-fungible.
/// -/// The above properties guarantee that there can never be a collision between a fungible and a +/// This property guarantees that there can never be a collision between a fungible and a /// non-fungible asset. /// /// The methodology for constructing fungible and non-fungible assets is described below. /// /// # Fungible assets /// -/// - A fungible asset's data layout is: `[amount, 0, faucet_id_suffix, faucet_id_prefix]`. +/// - A fungible asset's value layout is: `[amount, 0, 0, 0]`. /// - A fungible asset's vault key layout is: `[0, 0, faucet_id_suffix, faucet_id_prefix]`. /// -/// The most significant elements of a fungible asset are set to the prefix (`faucet_id_prefix`) and -/// suffix (`faucet_id_suffix`) of the ID of the faucet which issues the asset. This guarantees the -/// properties described above (the fungible bit is `1`). +/// The most significant elements of a fungible asset's key are set to the prefix +/// (`faucet_id_prefix`) and suffix (`faucet_id_suffix`) of the ID of the faucet which issues the +/// asset. The asset ID limbs are set to zero, which means two instances of the same fungible asset +/// have the same asset key and will be merged together when stored in the same account's vault. /// -/// The least significant element is set to the amount of the asset. This amount cannot be greater -/// than [`FungibleAsset::MAX_AMOUNT`] and thus fits into a felt. -/// -/// Elements 1 and 2 are set to ZERO. +/// The least significant element of the value is set to the amount of the asset and the remaining +/// felts are zero. This amount cannot be greater than [`FungibleAsset::MAX_AMOUNT`] and thus fits +/// into a felt. 
/// /// It is impossible to find a collision between two fungible assets issued by different faucets as -/// the faucet_id is included in the description of the asset and this is guaranteed to be different +/// the faucet ID is included in the description of the asset and this is guaranteed to be different /// for each faucet as per the faucet creation logic. /// /// # Non-fungible assets /// -/// - A non-fungible asset's data layout is: `[hash0, hash1, hash2, faucet_id_prefix]`. -/// - A non-fungible asset's vault key layout is: `[faucet_id_prefix, hash1, hash2, hash0']`, where -/// `hash0'` is equivalent to `hash0` with the fungible bit set to `0`. See -/// [`NonFungibleAsset::vault_key`] for more details. +/// - A non-fungible asset's data layout is: `[hash0, hash1, hash2, hash3]`. +/// - A non-fungible asset's vault key layout is: `[hash0, hash1, faucet_id_suffix, +/// faucet_id_prefix]`. /// -/// The 4 elements of non-fungible assets are computed as follows: -/// - First the asset data is hashed. This compresses an asset of an arbitrary length to 4 field -/// elements: `[hash0, hash1, hash2, hash3]`. -/// - `hash3` is then replaced with the prefix of the faucet ID (`faucet_id_prefix`) which issues -/// the asset: `[hash0, hash1, hash2, faucet_id_prefix]`. +/// The 4 elements of non-fungible assets are computed by hashing the asset data. This compresses an +/// asset of an arbitrary length to 4 field elements: `[hash0, hash1, hash2, hash3]`. /// /// It is impossible to find a collision between two non-fungible assets issued by different faucets -/// as the faucet_id is included in the description of the non-fungible asset and this is guaranteed -/// to be different as per the faucet creation logic. Collision resistance for non-fungible assets -/// issued by the same faucet is ~2^95. +/// as the faucet ID is included in the description of the non-fungible asset and this is guaranteed +/// to be different as per the faucet creation logic. 
+/// +/// The most significant elements of a non-fungible asset's key are set to the prefix +/// (`faucet_id_prefix`) and suffix (`faucet_id_suffix`) of the ID of the faucet which issues the +/// asset. The asset ID limbs are set to hashes from the asset's value. This means the collision +/// resistance of non-fungible assets issued by the same faucet is ~2^64, due to the 128-bit asset +/// ID that is unique per non-fungible asset. In other words, two non-fungible assets issued by the +/// same faucet are very unlikely to have the same asset key and thus should not collide when stored +/// in the same account's vault. #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum Asset { Fungible(FungibleAsset), @@ -93,47 +97,66 @@ pub enum Asset { } impl Asset { - /// Creates a new [Asset] without checking its validity. - pub(crate) fn new_unchecked(value: Word) -> Asset { - if is_not_a_non_fungible_asset(value) { - Asset::Fungible(FungibleAsset::new_unchecked(value)) + /// Creates an asset from the provided key and value. + /// + /// # Errors + /// + /// Returns an error if: + /// - [`FungibleAsset::from_key_value`] or [`NonFungibleAsset::from_key_value`] fails. + pub fn from_key_value(key: AssetVaultKey, value: Word) -> Result { + if matches!(key.faucet_id().account_type(), AccountType::FungibleFaucet) { + FungibleAsset::from_key_value(key, value).map(Asset::Fungible) } else { - Asset::NonFungible(unsafe { NonFungibleAsset::new_unchecked(value) }) + NonFungibleAsset::from_key_value(key, value).map(Asset::NonFungible) + } + } + + /// Creates an asset from the provided key and value. + /// + /// Prefer [`Self::from_key_value`] for more type safety. + /// + /// # Errors + /// + /// Returns an error if: + /// - The provided key does not contain a valid faucet ID. + /// - [`Self::from_key_value`] fails. 
+ pub fn from_key_value_words(key: Word, value: Word) -> Result { + let vault_key = AssetVaultKey::try_from(key)?; + Self::from_key_value(vault_key, value) + } + + /// Returns a copy of this asset with the given [`AssetCallbackFlag`]. + pub fn with_callbacks(self, callbacks: AssetCallbackFlag) -> Self { + match self { + Asset::Fungible(fungible_asset) => fungible_asset.with_callbacks(callbacks).into(), + Asset::NonFungible(non_fungible_asset) => { + non_fungible_asset.with_callbacks(callbacks).into() + }, } } /// Returns true if this asset is the same as the specified asset. /// - /// Two assets are defined to be the same if: - /// - For fungible assets, if they were issued by the same faucet. - /// - For non-fungible assets, if the assets are identical. + /// Two assets are defined to be the same if their vault keys match. pub fn is_same(&self, other: &Self) -> bool { - use Asset::*; - match (self, other) { - (Fungible(l), Fungible(r)) => l.is_from_same_faucet(r), - (NonFungible(l), NonFungible(r)) => l == r, - _ => false, - } + self.vault_key() == other.vault_key() } /// Returns true if this asset is a fungible asset. - pub const fn is_fungible(&self) -> bool { + pub fn is_fungible(&self) -> bool { matches!(self, Self::Fungible(_)) } /// Returns true if this asset is a non fungible asset. - pub const fn is_non_fungible(&self) -> bool { + pub fn is_non_fungible(&self) -> bool { matches!(self, Self::NonFungible(_)) } - /// Returns the prefix of the faucet ID which issued this asset. - /// - /// To get the full [`AccountId`](crate::account::AccountId) of a fungible asset the asset - /// must be matched on. - pub fn faucet_id_prefix(&self) -> AccountIdPrefix { + /// Returns the ID of the faucet that issued this asset. 
+ pub fn faucet_id(&self) -> AccountId { match self { - Self::Fungible(asset) => asset.faucet_id_prefix(), - Self::NonFungible(asset) => asset.faucet_id_prefix(), + Self::Fungible(asset) => asset.faucet_id(), + Self::NonFungible(asset) => asset.faucet_id(), } } @@ -145,6 +168,30 @@ impl Asset { } } + /// Returns the asset's key encoded to a [`Word`]. + pub fn to_key_word(&self) -> Word { + self.vault_key().to_word() + } + + /// Returns the asset's value encoded to a [`Word`]. + pub fn to_value_word(&self) -> Word { + match self { + Asset::Fungible(fungible_asset) => fungible_asset.to_value_word(), + Asset::NonFungible(non_fungible_asset) => non_fungible_asset.to_value_word(), + } + } + + /// Returns the asset encoded as elements. + /// + /// The first four elements contain the asset key and the last four elements contain the asset + /// value. + pub fn as_elements(&self) -> [Felt; 8] { + let mut elements = [Felt::ZERO; 8]; + elements[0..4].copy_from_slice(self.to_key_word().as_elements()); + elements[4..8].copy_from_slice(self.to_value_word().as_elements()); + elements + } + /// Returns the inner [`FungibleAsset`]. /// /// # Panics @@ -170,47 +217,6 @@ impl Asset { } } -impl From for Word { - fn from(asset: Asset) -> Self { - match asset { - Asset::Fungible(asset) => asset.into(), - Asset::NonFungible(asset) => asset.into(), - } - } -} - -impl From<&Asset> for Word { - fn from(value: &Asset) -> Self { - (*value).into() - } -} - -impl TryFrom<&Word> for Asset { - type Error = AssetError; - - fn try_from(value: &Word) -> Result { - (*value).try_into() - } -} - -impl TryFrom for Asset { - type Error = AssetError; - - fn try_from(value: Word) -> Result { - // Return an error if element 3 is not a valid account ID prefix, which cannot be checked by - // is_not_a_non_fungible_asset. - // Keep in mind serialized assets do _not_ carry the suffix required to reconstruct the full - // account identifier. 
- let prefix = AccountIdPrefix::try_from(value[3]) - .map_err(|err| AssetError::InvalidFaucetAccountId(Box::from(err)))?; - match prefix.account_type() { - AccountType::FungibleFaucet => FungibleAsset::try_from(value).map(Asset::from), - AccountType::NonFungibleFaucet => NonFungibleAsset::try_from(value).map(Asset::from), - _ => Err(AssetError::InvalidFaucetAccountIdPrefix(prefix)), - } - } -} - // SERIALIZATION // ================================================================================================ @@ -232,58 +238,34 @@ impl Serializable for Asset { impl Deserializable for Asset { fn read_from(source: &mut R) -> Result { - // Both asset types have their faucet ID prefix as the first element, so we can use it to - // inspect what type of asset it is. - let faucet_id_prefix: AccountIdPrefix = source.read()?; + // Both asset types have their faucet ID as the first element, so we can use it to inspect + // what type of asset it is. + let faucet_id: AccountId = source.read()?; - match faucet_id_prefix.account_type() { + match faucet_id.account_type() { AccountType::FungibleFaucet => { - FungibleAsset::deserialize_with_faucet_id_prefix(faucet_id_prefix, source) - .map(Asset::from) + FungibleAsset::deserialize_with_faucet_id(faucet_id, source).map(Asset::from) }, AccountType::NonFungibleFaucet => { - NonFungibleAsset::deserialize_with_faucet_id_prefix(faucet_id_prefix, source) - .map(Asset::from) + NonFungibleAsset::deserialize_with_faucet_id(faucet_id, source).map(Asset::from) }, other_type => Err(DeserializationError::InvalidValue(format!( - "failed to deserialize asset: expected an account ID prefix of type faucet, found {other_type:?}" + "failed to deserialize asset: expected an account ID prefix of type faucet, found {other_type}" ))), } } } -// HELPER FUNCTIONS -// ================================================================================================ - -/// Returns `true` if asset in [Word] is not a non-fungible asset. 
-///
-/// Note: this does not mean that the word is a fungible asset as the word may contain a value
-/// which is not a valid asset.
-fn is_not_a_non_fungible_asset(asset: Word) -> bool {
-    match AccountIdPrefix::try_from(asset[3]) {
-        Ok(prefix) => {
-            matches!(prefix.account_type(), AccountType::FungibleFaucet)
-        },
-        Err(_err) => {
-            #[cfg(debug_assertions)]
-            panic!("invalid account ID prefix passed to is_not_a_non_fungible_asset: {_err}");
-            #[cfg(not(debug_assertions))]
-            false
-        },
-    }
-}
-
 // TESTS
 // ================================================================================================
 
 #[cfg(test)]
 mod tests {
-    use miden_crypto::Word;
     use miden_crypto::utils::{Deserializable, Serializable};
 
     use super::{Asset, FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails};
-    use crate::account::{AccountId, AccountIdPrefix};
+    use crate::account::AccountId;
     use crate::testing::account_id::{
         ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET,
         ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET,
@@ -295,8 +277,9 @@ mod tests {
         ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET_1,
     };
 
+    /// Tests the serialization roundtrip for assets <-> bytes and assets <-> words.
#[test] - fn test_asset_serde() { + fn test_asset_serde() -> anyhow::Result<()> { for fungible_account_id in [ ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, @@ -307,6 +290,13 @@ mod tests { let account_id = AccountId::try_from(fungible_account_id).unwrap(); let fungible_asset: Asset = FungibleAsset::new(account_id, 10).unwrap().into(); assert_eq!(fungible_asset, Asset::read_from_bytes(&fungible_asset.to_bytes()).unwrap()); + assert_eq!( + fungible_asset, + Asset::from_key_value_words( + fungible_asset.to_key_word(), + fungible_asset.to_value_word() + )?, + ); } for non_fungible_account_id in [ @@ -315,50 +305,33 @@ mod tests { ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET_1, ] { let account_id = AccountId::try_from(non_fungible_account_id).unwrap(); - let details = NonFungibleAssetDetails::new(account_id.prefix(), vec![1, 2, 3]).unwrap(); + let details = NonFungibleAssetDetails::new(account_id, vec![1, 2, 3]).unwrap(); let non_fungible_asset: Asset = NonFungibleAsset::new(&details).unwrap().into(); assert_eq!( non_fungible_asset, Asset::read_from_bytes(&non_fungible_asset.to_bytes()).unwrap() ); - } - } - - #[test] - fn test_new_unchecked() { - for fungible_account_id in [ - ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1, - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_2, - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_3, - ] { - let account_id = AccountId::try_from(fungible_account_id).unwrap(); - let fungible_asset: Asset = FungibleAsset::new(account_id, 10).unwrap().into(); - assert_eq!(fungible_asset, Asset::new_unchecked(Word::from(&fungible_asset))); + assert_eq!( + non_fungible_asset, + Asset::from_key_value_words( + non_fungible_asset.to_key_word(), + non_fungible_asset.to_value_word() + )? 
+ ); } - for non_fungible_account_id in [ - ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET, - ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, - ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET_1, - ] { - let account_id = AccountId::try_from(non_fungible_account_id).unwrap(); - let details = NonFungibleAssetDetails::new(account_id.prefix(), vec![1, 2, 3]).unwrap(); - let non_fungible_asset: Asset = NonFungibleAsset::new(&details).unwrap().into(); - assert_eq!(non_fungible_asset, Asset::new_unchecked(Word::from(non_fungible_asset))); - } + Ok(()) } - /// This test asserts that account ID's prefix is serialized in the first felt of assets. + /// This test asserts that account ID's is serialized in the first felt of assets. /// Asset deserialization relies on that fact and if this changes the serialization must /// be updated. #[test] - fn test_account_id_prefix_is_in_first_serialized_felt() { + fn test_account_id_is_serialized_first() { for asset in [FungibleAsset::mock(300), NonFungibleAsset::mock(&[0xaa, 0xbb])] { let serialized_asset = asset.to_bytes(); - let prefix = AccountIdPrefix::read_from_bytes(&serialized_asset).unwrap(); - assert_eq!(prefix, asset.faucet_id_prefix()); + let prefix = AccountId::read_from_bytes(&serialized_asset).unwrap(); + assert_eq!(prefix, asset.faucet_id()); } } } diff --git a/crates/miden-protocol/src/asset/nonfungible.rs b/crates/miden-protocol/src/asset/nonfungible.rs index d48b26602b..c6fcec2297 100644 --- a/crates/miden-protocol/src/asset/nonfungible.rs +++ b/crates/miden-protocol/src/asset/nonfungible.rs @@ -1,44 +1,37 @@ -use alloc::boxed::Box; use alloc::string::ToString; use alloc::vec::Vec; use core::fmt; use super::vault::AssetVaultKey; -use super::{AccountIdPrefix, AccountType, Asset, AssetError, Felt, Hasher, Word}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; -use crate::{FieldElement, WORD_SIZE}; - -/// Position of the faucet_id inside the [`NonFungibleAsset`] word having fields in BigEndian. 
-const FAUCET_ID_POS_BE: usize = 3; +use super::{AccountType, Asset, AssetCallbackFlag, AssetError, Word}; +use crate::Hasher; +use crate::account::AccountId; +use crate::asset::vault::AssetId; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // NON-FUNGIBLE ASSET // ================================================================================================ /// A commitment to a non-fungible asset. /// -/// The commitment is constructed as follows: -/// -/// - Hash the asset data producing `[hash0, hash1, hash2, hash3]`. -/// - Replace the value of `hash3` with the prefix of the faucet id (`faucet_id_prefix`) producing -/// `[hash0, hash1, hash2, faucet_id_prefix]`. -/// - This layout ensures that fungible and non-fungible assets are distinguishable by interpreting -/// the 3rd element of an asset as an [`AccountIdPrefix`] and checking its type. +/// See [`Asset`] for details on how it is constructed. /// /// [`NonFungibleAsset`] itself does not contain the actual asset data. The container for this data /// is [`NonFungibleAssetDetails`]. +/// +/// The non-fungible asset can have callbacks to the faucet enabled or disabled, depending on +/// [`AssetCallbackFlag`]. See [`AssetCallbacks`](crate::asset::AssetCallbacks) for more details. #[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub struct NonFungibleAsset(Word); - -impl PartialOrd for NonFungibleAsset { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for NonFungibleAsset { - fn cmp(&self, other: &Self) -> core::cmp::Ordering { - self.0.cmp(&other.0) - } +pub struct NonFungibleAsset { + faucet_id: AccountId, + value: Word, + callbacks: AssetCallbackFlag, } impl NonFungibleAsset { @@ -47,8 +40,9 @@ impl NonFungibleAsset { /// The serialized size of a [`NonFungibleAsset`] in bytes. /// - /// Currently represented as a word. 
- pub const SERIALIZED_SIZE: usize = Felt::ELEMENT_BYTES * WORD_SIZE; + /// An account ID (15 bytes) plus a word (32 bytes) plus a callbacks flag (1 byte). + pub const SERIALIZED_SIZE: usize = + AccountId::SERIALIZED_SIZE + Word::SERIALIZED_SIZE + AssetCallbackFlag::SERIALIZED_SIZE; // CONSTRUCTORS // -------------------------------------------------------------------------------------------- @@ -70,110 +64,110 @@ impl NonFungibleAsset { /// /// # Errors /// Returns an error if the provided faucet ID is not for a non-fungible asset faucet. - pub fn from_parts(faucet_id: AccountIdPrefix, mut data_hash: Word) -> Result { + pub fn from_parts(faucet_id: AccountId, value: Word) -> Result { if !matches!(faucet_id.account_type(), AccountType::NonFungibleFaucet) { return Err(AssetError::NonFungibleFaucetIdTypeMismatch(faucet_id)); } - data_hash[FAUCET_ID_POS_BE] = Felt::from(faucet_id); - - Ok(Self(data_hash)) + Ok(Self { + faucet_id, + value, + callbacks: AssetCallbackFlag::default(), + }) } - /// Creates a new [NonFungibleAsset] without checking its validity. + /// Creates a non-fungible asset from the provided key and value. /// - /// # Safety - /// This function requires that the provided value is a valid word encoding of a - /// [NonFungibleAsset]. - pub unsafe fn new_unchecked(value: Word) -> NonFungibleAsset { - NonFungibleAsset(value) - } + /// # Errors + /// + /// Returns an error if: + /// - The provided key does not contain a valid faucet ID. + /// - The provided key's asset ID limbs are not equal to the provided value's first and second + /// element. + /// - The faucet ID is not a non-fungible faucet ID. 
+ pub fn from_key_value(key: AssetVaultKey, value: Word) -> Result { + if key.asset_id().suffix() != value[0] || key.asset_id().prefix() != value[1] { + return Err(AssetError::NonFungibleAssetIdMustMatchValue { + asset_id: key.asset_id(), + value, + }); + } - // ACCESSORS - // -------------------------------------------------------------------------------------------- + let mut asset = Self::from_parts(key.faucet_id(), value)?; + asset.callbacks = key.callback_flag(); - /// Returns the vault key of the [`NonFungibleAsset`]. - /// - /// This is the same as the asset with the following modifications, in this order: - /// - Swaps the faucet ID at index 0 and `hash0` at index 3. - /// - Sets the fungible bit for `hash0` to `0`. + Ok(asset) + } + + /// Creates a non-fungible asset from the provided key and value. /// - /// # Rationale + /// Prefer [`Self::from_key_value`] for more type safety. /// - /// This means `hash0` will be used as the leaf index in the asset SMT which ensures that a - /// non-fungible faucet's assets generally end up in different leaves as the key is not based on - /// the faucet ID. + /// # Errors /// - /// It also ensures that there is never any collision in the leaf index between a non-fungible - /// asset and a fungible asset, as the former's vault key always has the fungible bit set to `0` - /// and the latter's vault key always has the bit set to `1`. - pub fn vault_key(&self) -> AssetVaultKey { - let mut vault_key = self.0; - - // Swap prefix of faucet ID with hash0. - vault_key.swap(0, FAUCET_ID_POS_BE); - - // Set the fungible bit to zero. - vault_key[3] = - AccountIdPrefix::clear_fungible_bit(self.faucet_id_prefix().version(), vault_key[3]); - - AssetVaultKey::new_unchecked(vault_key) + /// Returns an error if: + /// - The provided key does not contain a valid faucet ID. + /// - [`Self::from_key_value`] fails. 
+ pub fn from_key_value_words(key: Word, value: Word) -> Result { + let vault_key = AssetVaultKey::try_from(key)?; + Self::from_key_value(vault_key, value) } - /// Return ID prefix of the faucet which issued this asset. - pub fn faucet_id_prefix(&self) -> AccountIdPrefix { - AccountIdPrefix::new_unchecked(self.0[FAUCET_ID_POS_BE]) + /// Returns a copy of this asset with the given [`AssetCallbackFlag`]. + pub fn with_callbacks(mut self, callbacks: AssetCallbackFlag) -> Self { + self.callbacks = callbacks; + self } - // HELPER FUNCTIONS + // ACCESSORS // -------------------------------------------------------------------------------------------- - /// Validates this non-fungible asset. - /// # Errors - /// Returns an error if: - /// - The faucet_id is not a valid non-fungible faucet ID. - /// - The most significant bit of the asset is not ZERO. - fn validate(&self) -> Result<(), AssetError> { - let faucet_id = AccountIdPrefix::try_from(self.0[FAUCET_ID_POS_BE]) - .map_err(|err| AssetError::InvalidFaucetAccountId(Box::new(err)))?; - - let account_type = faucet_id.account_type(); - if !matches!(account_type, AccountType::NonFungibleFaucet) { - return Err(AssetError::NonFungibleFaucetIdTypeMismatch(faucet_id)); - } + /// Returns the vault key of the [`NonFungibleAsset`]. + /// + /// See [`Asset`] docs for details on the key. + pub fn vault_key(&self) -> AssetVaultKey { + let asset_id_suffix = self.value[0]; + let asset_id_prefix = self.value[1]; + let asset_id = AssetId::new(asset_id_suffix, asset_id_prefix); - Ok(()) + AssetVaultKey::new(asset_id, self.faucet_id, self.callbacks) + .expect("constructors should ensure account ID is of type non-fungible faucet") } -} -impl From for Word { - fn from(asset: NonFungibleAsset) -> Self { - asset.0 + /// Returns the ID of the faucet which issued this asset. 
+ pub fn faucet_id(&self) -> AccountId { + self.faucet_id } -} -impl From for Asset { - fn from(asset: NonFungibleAsset) -> Self { - Asset::NonFungible(asset) + /// Returns the [`AssetCallbackFlag`] of this asset. + pub fn callbacks(&self) -> AssetCallbackFlag { + self.callbacks } -} -impl TryFrom for NonFungibleAsset { - type Error = AssetError; + /// Returns the asset's key encoded to a [`Word`]. + pub fn to_key_word(&self) -> Word { + self.vault_key().to_word() + } - fn try_from(value: Word) -> Result { - let asset = Self(value); - asset.validate()?; - Ok(asset) + /// Returns the asset's value encoded to a [`Word`]. + pub fn to_value_word(&self) -> Word { + self.value } } impl fmt::Display for NonFungibleAsset { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // TODO: Replace with hex representation? write!(f, "{self:?}") } } +impl From for Asset { + fn from(asset: NonFungibleAsset) -> Self { + Asset::NonFungible(asset) + } +} + // SERIALIZATION // ================================================================================================ @@ -181,44 +175,38 @@ impl Serializable for NonFungibleAsset { fn write_into(&self, target: &mut W) { // All assets should serialize their faucet ID at the first position to allow them to be // easily distinguishable during deserialization. 
- target.write(self.faucet_id_prefix()); - target.write(self.0[2]); - target.write(self.0[1]); - target.write(self.0[0]); + target.write(self.faucet_id()); + target.write(self.value); + target.write(self.callbacks); } fn get_size_hint(&self) -> usize { - Self::SERIALIZED_SIZE + self.faucet_id.get_size_hint() + self.value.get_size_hint() + self.callbacks.get_size_hint() } } impl Deserializable for NonFungibleAsset { fn read_from(source: &mut R) -> Result { - let faucet_id_prefix: AccountIdPrefix = source.read()?; + let faucet_id: AccountId = source.read()?; - Self::deserialize_with_faucet_id_prefix(faucet_id_prefix, source) + Self::deserialize_with_faucet_id(faucet_id, source) .map_err(|err| DeserializationError::InvalidValue(err.to_string())) } } impl NonFungibleAsset { - /// Deserializes a [`NonFungibleAsset`] from an [`AccountIdPrefix`] and the remaining data from - /// the given `source`. - pub(super) fn deserialize_with_faucet_id_prefix( - faucet_id_prefix: AccountIdPrefix, + /// Deserializes a [`NonFungibleAsset`] from an [`AccountId`] and the remaining data from the + /// given `source`. + pub(super) fn deserialize_with_faucet_id( + faucet_id: AccountId, source: &mut R, ) -> Result { - let hash_2: Felt = source.read()?; - let hash_1: Felt = source.read()?; - let hash_0: Felt = source.read()?; - - // The last felt in the data_hash will be replaced by the faucet id, so we can set it to - // zero here. 
- NonFungibleAsset::from_parts( - faucet_id_prefix, - Word::from([hash_0, hash_1, hash_2, Felt::ZERO]), - ) - .map_err(|err| DeserializationError::InvalidValue(err.to_string())) + let value: Word = source.read()?; + let callbacks: AssetCallbackFlag = source.read()?; + + NonFungibleAsset::from_parts(faucet_id, value) + .map(|asset| asset.with_callbacks(callbacks)) + .map_err(|err| DeserializationError::InvalidValue(err.to_string())) } } @@ -230,7 +218,7 @@ impl NonFungibleAsset { /// Unlike [NonFungibleAsset] struct, this struct contains full details of a non-fungible asset. #[derive(Debug, Clone, PartialEq, Eq)] pub struct NonFungibleAssetDetails { - faucet_id: AccountIdPrefix, + faucet_id: AccountId, asset_data: Vec, } @@ -239,7 +227,7 @@ impl NonFungibleAssetDetails { /// /// # Errors /// Returns an error if the provided faucet ID is not for a non-fungible asset faucet. - pub fn new(faucet_id: AccountIdPrefix, asset_data: Vec) -> Result { + pub fn new(faucet_id: AccountId, asset_data: Vec) -> Result { if !matches!(faucet_id.account_type(), AccountType::NonFungibleFaucet) { return Err(AssetError::NonFungibleFaucetIdTypeMismatch(faucet_id)); } @@ -248,7 +236,7 @@ impl NonFungibleAssetDetails { } /// Returns ID of the faucet which issued this asset. 
- pub fn faucet_id(&self) -> AccountIdPrefix { + pub fn faucet_id(&self) -> AccountId { self.faucet_id } @@ -266,6 +254,7 @@ mod tests { use assert_matches::assert_matches; use super::*; + use crate::Felt; use crate::account::AccountId; use crate::testing::account_id::{ ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, @@ -275,32 +264,66 @@ mod tests { }; #[test] - fn test_non_fungible_asset_serde() { + fn fungible_asset_from_key_value_fails_on_invalid_asset_id() -> anyhow::Result<()> { + let invalid_key = AssetVaultKey::new_native( + AssetId::new(Felt::from(1u32), Felt::from(2u32)), + ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET.try_into()?, + )?; + let err = + NonFungibleAsset::from_key_value(invalid_key, Word::from([4, 5, 6, 7u32])).unwrap_err(); + + assert_matches!(err, AssetError::NonFungibleAssetIdMustMatchValue { .. }); + + Ok(()) + } + + #[test] + fn test_non_fungible_asset_serde() -> anyhow::Result<()> { for non_fungible_account_id in [ ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET_1, ] { let account_id = AccountId::try_from(non_fungible_account_id).unwrap(); - let details = NonFungibleAssetDetails::new(account_id.prefix(), vec![1, 2, 3]).unwrap(); + let details = NonFungibleAssetDetails::new(account_id, vec![1, 2, 3]).unwrap(); let non_fungible_asset = NonFungibleAsset::new(&details).unwrap(); assert_eq!( non_fungible_asset, NonFungibleAsset::read_from_bytes(&non_fungible_asset.to_bytes()).unwrap() ); + assert_eq!(non_fungible_asset.to_bytes().len(), non_fungible_asset.get_size_hint()); + + assert_eq!( + non_fungible_asset, + NonFungibleAsset::from_key_value_words( + non_fungible_asset.to_key_word(), + non_fungible_asset.to_value_word() + )? 
+ ) } let account = AccountId::try_from(ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET).unwrap(); - let details = NonFungibleAssetDetails::new(account.prefix(), vec![4, 5, 6, 7]).unwrap(); + let details = NonFungibleAssetDetails::new(account, vec![4, 5, 6, 7]).unwrap(); let asset = NonFungibleAsset::new(&details).unwrap(); let mut asset_bytes = asset.to_bytes(); let fungible_faucet_id = AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET).unwrap(); - // Set invalid Faucet ID Prefix. - asset_bytes[0..8].copy_from_slice(&fungible_faucet_id.prefix().to_bytes()); + // Set invalid faucet ID. + asset_bytes[0..AccountId::SERIALIZED_SIZE].copy_from_slice(&fungible_faucet_id.to_bytes()); let err = NonFungibleAsset::read_from_bytes(&asset_bytes).unwrap_err(); assert_matches!(err, DeserializationError::InvalidValue(msg) if msg.contains("must be of type NonFungibleFaucet")); + + Ok(()) + } + + #[test] + fn test_vault_key_for_non_fungible_asset() { + let asset = NonFungibleAsset::mock(&[42]); + + assert_eq!(asset.vault_key().faucet_id(), NonFungibleAsset::mock_issuer()); + assert_eq!(asset.vault_key().asset_id().suffix(), asset.to_value_word()[0]); + assert_eq!(asset.vault_key().asset_id().prefix(), asset.to_value_word()[1]); } } diff --git a/crates/miden-protocol/src/asset/token_symbol.rs b/crates/miden-protocol/src/asset/token_symbol.rs index 4be4e5e30d..7189d6805b 100644 --- a/crates/miden-protocol/src/asset/token_symbol.rs +++ b/crates/miden-protocol/src/asset/token_symbol.rs @@ -1,12 +1,16 @@ +use alloc::fmt; use alloc::string::String; use super::{Felt, TokenSymbolError}; -/// Represents a string token symbol (e.g. "POL", "ETH") as a single [`Felt`] value. +/// Represents a token symbol (e.g. "POL", "ETH"). /// -/// Token Symbols can consists of up to 12 capital Latin characters, e.g. "C", "ETH", "MIDEN". -#[derive(Default, Clone, Copy, Debug, PartialEq)] -pub struct TokenSymbol(Felt); +/// Token Symbols can consist of up to 12 capital Latin characters, e.g. 
"C", "ETH", "MIDEN". +/// +/// The symbol is stored as a [`String`] and can be converted to a [`Felt`] encoding via +/// [`as_element()`](Self::as_element). +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TokenSymbol(String); impl TokenSymbol { /// Maximum allowed length of the token string. @@ -15,34 +19,25 @@ impl TokenSymbol { /// The length of the set of characters that can be used in a token's name. pub const ALPHABET_LENGTH: u64 = 26; + /// The minimum integer value of an encoded [`TokenSymbol`]. + /// + /// This value encodes the "A" token symbol. + pub const MIN_ENCODED_VALUE: u64 = 1; + /// The maximum integer value of an encoded [`TokenSymbol`]. /// /// This value encodes the "ZZZZZZZZZZZZ" token symbol. pub const MAX_ENCODED_VALUE: u64 = 2481152873203736562; - /// Constructs a new [`TokenSymbol`] from a static string. - /// - /// This function is `const` and can be used to define token symbols as constants, e.g.: - /// - /// ```rust - /// # use miden_protocol::asset::TokenSymbol; - /// const TOKEN: TokenSymbol = TokenSymbol::from_static_str("ETH"); - /// ``` - /// - /// This is convenient because using a string that is not a valid token symbol fails to - /// compile. + /// Constructs a new [`TokenSymbol`] from a string, panicking on invalid input. /// /// # Panics /// /// Panics if: /// - The length of the provided string is less than 1 or greater than 12. /// - The provided token string contains characters that are not uppercase ASCII. - pub const fn from_static_str(symbol: &'static str) -> Self { - match encode_symbol_to_felt(symbol) { - Ok(felt) => Self(felt), - // We cannot format the error in a const context. - Err(_) => panic!("invalid token symbol"), - } + pub fn new_unchecked(symbol: &str) -> Self { + Self::new(symbol).expect("invalid token symbol") } /// Creates a new [`TokenSymbol`] instance from the provided token name string. 
@@ -52,150 +47,132 @@ impl TokenSymbol { /// - The length of the provided string is less than 1 or greater than 12. /// - The provided token string contains characters that are not uppercase ASCII. pub fn new(symbol: &str) -> Result { - let felt = encode_symbol_to_felt(symbol)?; - Ok(Self(felt)) - } + let len = symbol.len(); - /// Returns the token name string from the encoded [`TokenSymbol`] value. - /// - /// # Errors - /// Returns an error if: - /// - The encoded value exceeds the maximum value of [`Self::MAX_ENCODED_VALUE`]. - /// - The encoded token string length is less than 1 or greater than 12. - /// - The encoded token string length is less than the actual string length. - pub fn to_string(&self) -> Result { - decode_felt_to_symbol(self.0) - } -} + if len == 0 || len > Self::MAX_SYMBOL_LENGTH { + return Err(TokenSymbolError::InvalidLength(len)); + } -impl From for Felt { - fn from(symbol: TokenSymbol) -> Self { - symbol.0 + for byte in symbol.as_bytes() { + if !byte.is_ascii_uppercase() { + return Err(TokenSymbolError::InvalidCharacter); + } + } + + Ok(Self(String::from(symbol))) } -} -impl TryFrom<&str> for TokenSymbol { - type Error = TokenSymbolError; + /// Returns the [`Felt`] encoding of this token symbol. + /// + /// The alphabet used in the encoding process consists of the Latin capital letters as defined + /// in the ASCII table, having the length of 26 characters. + /// + /// The encoding is performed by multiplying the intermediate encoded value by the length of + /// the used alphabet and adding the relative index of the character to it. At the end of the + /// encoding process the length of the initial token string is added to the encoded value. + /// + /// Relative character index is computed by subtracting the index of the character "A" (65) + /// from the index of the currently processing character, e.g., `A = 65 - 65 = 0`, + /// `B = 66 - 65 = 1`, `...` , `Z = 90 - 65 = 25`. 
+ pub fn as_element(&self) -> Felt { + let bytes = self.0.as_bytes(); + let len = bytes.len(); + + let mut encoded_value: u64 = 0; + let mut idx = 0; + + while idx < len { + let digit = (bytes[idx] - b'A') as u64; + encoded_value = encoded_value * Self::ALPHABET_LENGTH + digit; + idx += 1; + } - fn try_from(symbol: &str) -> Result { - TokenSymbol::new(symbol) + // add token length to the encoded value to be able to decode the exact number of + // characters + encoded_value = encoded_value * Self::ALPHABET_LENGTH + len as u64; + + Felt::new(encoded_value) } } -impl TryFrom for TokenSymbol { - type Error = TokenSymbolError; - - fn try_from(felt: Felt) -> Result { - // Check if the felt value is within the valid range - if felt.as_int() > Self::MAX_ENCODED_VALUE { - return Err(TokenSymbolError::ValueTooLarge(felt.as_int())); - } - Ok(TokenSymbol(felt)) +impl fmt::Display for TokenSymbol { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.0) } } -// HELPER FUNCTIONS -// ================================================================================================ - -/// Encodes the provided token symbol string into a single [`Felt`] value. -/// -/// The alphabet used in the decoding process consists of the Latin capital letters as defined in -/// the ASCII table, having the length of 26 characters. -/// -/// The encoding is performed by multiplying the intermediate encrypted value by the length of the -/// used alphabet and adding the relative index of the character to it. At the end of the encoding -/// process the length of the initial token string is added to the encrypted value. -/// -/// Relative character index is computed by subtracting the index of the character "A" (65) from the -/// index of the currently processing character, e.g., `A = 65 - 65 = 0`, `B = 66 - 65 = 1`, `...` , -/// `Z = 90 - 65 = 25`. -/// -/// # Errors -/// Returns an error if: -/// - The length of the provided string is less than 1 or greater than 12. 
-/// - The provided token string contains characters that are not uppercase ASCII. -const fn encode_symbol_to_felt(s: &str) -> Result { - let bytes = s.as_bytes(); - let len = bytes.len(); - - if len == 0 || len > TokenSymbol::MAX_SYMBOL_LENGTH { - return Err(TokenSymbolError::InvalidLength(len)); +impl From for Felt { + fn from(symbol: TokenSymbol) -> Self { + symbol.as_element() } +} - let mut encoded_value: u64 = 0; - let mut idx = 0; - - while idx < len { - let byte = bytes[idx]; - - if !byte.is_ascii_uppercase() { - return Err(TokenSymbolError::InvalidCharacter); - } - - let digit = (byte - b'A') as u64; - encoded_value = encoded_value * TokenSymbol::ALPHABET_LENGTH + digit; - idx += 1; +impl From<&TokenSymbol> for Felt { + fn from(symbol: &TokenSymbol) -> Self { + symbol.as_element() } +} - // add token length to the encoded value to be able to decode the exact number of characters - encoded_value = encoded_value * TokenSymbol::ALPHABET_LENGTH + len as u64; +impl TryFrom<&str> for TokenSymbol { + type Error = TokenSymbolError; - Ok(Felt::new(encoded_value)) + fn try_from(symbol: &str) -> Result { + TokenSymbol::new(symbol) + } } -/// Decodes a [Felt] representation of the token symbol into a string. +/// Decodes a [`Felt`] representation of the token symbol into a [`TokenSymbol`]. /// /// The alphabet used in the decoding process consists of the Latin capital letters as defined in /// the ASCII table, having the length of 26 characters. /// -/// The decoding is performed by getting the modulus of the intermediate encrypted value by the +/// The decoding is performed by getting the modulus of the intermediate encoded value by the /// length of the used alphabet and then dividing the intermediate value by the length of the -/// alphabet to shift to the next character. At the beginning of the decoding process the length of -/// the initial token string is obtained from the encrypted value. 
After that the value obtained +/// alphabet to shift to the next character. At the beginning of the decoding process the length +/// of the initial token string is obtained from the encoded value. After that the value obtained /// after taking the modulus represents the relative character index, which then gets converted to /// the ASCII index. /// -/// Final ASCII character idex is computed by adding the index of the character "A" (65) to the -/// index of the currently processing character, e.g., `A = 0 + 65 = 65`, `B = 1 + 65 = 66`, `...` , -/// `Z = 25 + 65 = 90`. -/// -/// # Errors -/// Returns an error if: -/// - The encoded value exceeds the maximum value of [`TokenSymbol::MAX_ENCODED_VALUE`]. -/// - The encoded token string length is less than 1 or greater than 12. -/// - The encoded token string length is less than the actual string length. -fn decode_felt_to_symbol(encoded_felt: Felt) -> Result { - let encoded_value = encoded_felt.as_int(); - - // Check if the encoded value is within the valid range - if encoded_value > TokenSymbol::MAX_ENCODED_VALUE { - return Err(TokenSymbolError::ValueTooLarge(encoded_value)); - } +/// Final ASCII character index is computed by adding the index of the character "A" (65) to the +/// index of the currently processing character, e.g., `A = 0 + 65 = 65`, `B = 1 + 65 = 66`, +/// `...` , `Z = 25 + 65 = 90`. 
+impl TryFrom for TokenSymbol { + type Error = TokenSymbolError; - let mut decoded_string = String::new(); - let mut remaining_value = encoded_value; + fn try_from(felt: Felt) -> Result { + let encoded_value = felt.as_canonical_u64(); + if encoded_value < Self::MIN_ENCODED_VALUE { + return Err(TokenSymbolError::ValueTooSmall(encoded_value)); + } + if encoded_value > Self::MAX_ENCODED_VALUE { + return Err(TokenSymbolError::ValueTooLarge(encoded_value)); + } - // get the token symbol length - let token_len = (remaining_value % TokenSymbol::ALPHABET_LENGTH) as usize; - if token_len == 0 || token_len > TokenSymbol::MAX_SYMBOL_LENGTH { - return Err(TokenSymbolError::InvalidLength(token_len)); - } - remaining_value /= TokenSymbol::ALPHABET_LENGTH; + let mut decoded_string = String::new(); + let mut remaining_value = encoded_value; - for _ in 0..token_len { - let digit = (remaining_value % TokenSymbol::ALPHABET_LENGTH) as u8; - let char = (digit + b'A') as char; - decoded_string.insert(0, char); - remaining_value /= TokenSymbol::ALPHABET_LENGTH; - } + // get the token symbol length + let token_len = (remaining_value % Self::ALPHABET_LENGTH) as usize; + if token_len == 0 || token_len > Self::MAX_SYMBOL_LENGTH { + return Err(TokenSymbolError::InvalidLength(token_len)); + } + remaining_value /= Self::ALPHABET_LENGTH; - // return an error if some data still remains after specified number of characters have been - // decoded. - if remaining_value != 0 { - return Err(TokenSymbolError::DataNotFullyDecoded); - } + for _ in 0..token_len { + let digit = (remaining_value % Self::ALPHABET_LENGTH) as u8; + let char = (digit + b'A') as char; + decoded_string.insert(0, char); + remaining_value /= Self::ALPHABET_LENGTH; + } + + // return an error if some data still remains after specified number of characters have + // been decoded. 
+ if remaining_value != 0 { + return Err(TokenSymbolError::DataNotFullyDecoded); + } - Ok(decoded_string) + Ok(TokenSymbol(decoded_string)) + } } // TESTS @@ -203,15 +180,11 @@ fn decode_felt_to_symbol(encoded_felt: Felt) -> Result #[cfg(test)] mod test { + use alloc::string::ToString; + use assert_matches::assert_matches; - use super::{ - Felt, - TokenSymbol, - TokenSymbolError, - decode_felt_to_symbol, - encode_symbol_to_felt, - }; + use super::{Felt, TokenSymbol, TokenSymbolError}; #[test] fn test_token_symbol_decoding_encoding() { @@ -230,43 +203,37 @@ mod test { ]; for symbol in symbols { let token_symbol = TokenSymbol::try_from(symbol).unwrap(); - let decoded_symbol = TokenSymbol::to_string(&token_symbol).unwrap(); + let decoded_symbol = token_symbol.to_string(); assert_eq!(symbol, decoded_symbol); } - let symbol = ""; - let felt = encode_symbol_to_felt(symbol); - assert_matches!(felt.unwrap_err(), TokenSymbolError::InvalidLength(0)); + let err = TokenSymbol::new("").unwrap_err(); + assert_matches!(err, TokenSymbolError::InvalidLength(0)); - let symbol = "ABCDEFGHIJKLM"; - let felt = encode_symbol_to_felt(symbol); - assert_matches!(felt.unwrap_err(), TokenSymbolError::InvalidLength(13)); + let err = TokenSymbol::new("ABCDEFGHIJKLM").unwrap_err(); + assert_matches!(err, TokenSymbolError::InvalidLength(13)); - let symbol = "$$$"; - let felt = encode_symbol_to_felt(symbol); - assert_matches!(felt.unwrap_err(), TokenSymbolError::InvalidCharacter); + let err = TokenSymbol::new("$$$").unwrap_err(); + assert_matches!(err, TokenSymbolError::InvalidCharacter); let symbol = "ABCDEFGHIJKL"; - let token_symbol = TokenSymbol::try_from(symbol); - assert!(token_symbol.is_ok()); - let token_symbol_felt: Felt = token_symbol.unwrap().into(); - assert_eq!(token_symbol_felt, encode_symbol_to_felt(symbol).unwrap()); + let token_symbol = TokenSymbol::new(symbol).unwrap(); + let token_symbol_felt: Felt = token_symbol.into(); + assert_eq!(token_symbol_felt, 
TokenSymbol::new(symbol).unwrap().as_element()); } /// Checks that if the encoded length of the token is less than the actual number of token - /// characters, [decode_felt_to_symbol] procedure should return the - /// [TokenSymbolError::DataNotFullyDecoded] error. + /// characters, decoding should return the [TokenSymbolError::DataNotFullyDecoded] error. #[test] fn test_invalid_token_len() { // encoded value of this token has `6` as the length of the initial token string let encoded_symbol = TokenSymbol::try_from("ABCDEF").unwrap(); // decrease encoded length by, for example, `3` - let invalid_encoded_symbol_u64 = Felt::from(encoded_symbol).as_int() - 3; + let invalid_encoded_symbol_u64 = Felt::from(encoded_symbol).as_canonical_u64() - 3; - // check that `decode_felt_to_symbol()` procedure returns an error in attempt to create a - // token from encoded token with invalid length - let err = decode_felt_to_symbol(Felt::new(invalid_encoded_symbol_u64)).unwrap_err(); + // check that decoding returns an error for a token with invalid length + let err = TokenSymbol::try_from(Felt::new(invalid_encoded_symbol_u64)).unwrap_err(); assert_matches!(err, TokenSymbolError::DataNotFullyDecoded); } @@ -275,62 +242,65 @@ mod test { #[test] fn test_token_symbol_max_value() { let token_symbol = TokenSymbol::try_from("ZZZZZZZZZZZZ").unwrap(); - assert_eq!(Felt::from(token_symbol).as_int(), TokenSymbol::MAX_ENCODED_VALUE); + assert_eq!(Felt::from(token_symbol).as_canonical_u64(), TokenSymbol::MAX_ENCODED_VALUE); } - // Const function tests - // -------------------------------------------------------------------------------------------- + /// Utility test to make sure that the [TokenSymbol::MIN_ENCODED_VALUE] constant still + /// represents the minimum possible encoded value. 
+ #[test] + fn test_token_symbol_min_value() { + let token_symbol = TokenSymbol::try_from("A").unwrap(); + assert_eq!(Felt::from(token_symbol).as_canonical_u64(), TokenSymbol::MIN_ENCODED_VALUE); + } - const _TOKEN0: TokenSymbol = TokenSymbol::from_static_str("A"); - const _TOKEN1: TokenSymbol = TokenSymbol::from_static_str("ETH"); - const _TOKEN2: TokenSymbol = TokenSymbol::from_static_str("MIDEN"); - const _TOKEN3: TokenSymbol = TokenSymbol::from_static_str("ZZZZZZ"); - const _TOKEN4: TokenSymbol = TokenSymbol::from_static_str("ABCDEFGH"); - const _TOKEN5: TokenSymbol = TokenSymbol::from_static_str("ZZZZZZZZZZZZ"); + /// Checks that [TokenSymbol::try_from(Felt)] returns an error for values below the minimum. + #[test] + fn test_token_symbol_underflow() { + let err = TokenSymbol::try_from(Felt::ZERO).unwrap_err(); + assert_matches!(err, TokenSymbolError::ValueTooSmall(0)); + } + + // new_unchecked tests + // -------------------------------------------------------------------------------------------- #[test] - fn test_from_static_str_matches_new() { - // Test that from_static_str produces the same result as new + fn test_new_unchecked_matches_new() { + // Test that new_unchecked produces the same result as new let symbols = ["A", "BC", "ETH", "MIDEN", "ZZZZZZ", "ABCDEFGH", "ZZZZZZZZZZZZ"]; for symbol in symbols { let from_new = TokenSymbol::new(symbol).unwrap(); - let from_static = TokenSymbol::from_static_str(symbol); - assert_eq!( - Felt::from(from_new), - Felt::from(from_static), - "Mismatch for symbol: {}", - symbol - ); + let from_static = TokenSymbol::new_unchecked(symbol); + assert_eq!(from_new, from_static, "Mismatch for symbol: {}", symbol); } } #[test] #[should_panic(expected = "invalid token symbol")] fn token_symbol_panics_on_empty_string() { - TokenSymbol::from_static_str(""); + TokenSymbol::new_unchecked(""); } #[test] #[should_panic(expected = "invalid token symbol")] fn token_symbol_panics_on_too_long_string() { - 
TokenSymbol::from_static_str("ABCDEFGHIJKLM"); + TokenSymbol::new_unchecked("ABCDEFGHIJKLM"); } #[test] #[should_panic(expected = "invalid token symbol")] fn token_symbol_panics_on_lowercase() { - TokenSymbol::from_static_str("eth"); + TokenSymbol::new_unchecked("eth"); } #[test] #[should_panic(expected = "invalid token symbol")] fn token_symbol_panics_on_invalid_character() { - TokenSymbol::from_static_str("ET$"); + TokenSymbol::new_unchecked("ET$"); } #[test] #[should_panic(expected = "invalid token symbol")] fn token_symbol_panics_on_number() { - TokenSymbol::from_static_str("ETH1"); + TokenSymbol::new_unchecked("ETH1"); } } diff --git a/crates/miden-protocol/src/asset/vault/asset_id.rs b/crates/miden-protocol/src/asset/vault/asset_id.rs new file mode 100644 index 0000000000..5976ca4df9 --- /dev/null +++ b/crates/miden-protocol/src/asset/vault/asset_id.rs @@ -0,0 +1,43 @@ +use core::fmt::Display; + +use crate::Felt; + +/// The [`AssetId`] in an [`AssetVaultKey`](crate::asset::AssetVaultKey) distinguishes different +/// assets issued by the same faucet. +#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] +pub struct AssetId { + suffix: Felt, + prefix: Felt, +} + +impl AssetId { + /// Constructs an asset ID from its parts. + pub fn new(suffix: Felt, prefix: Felt) -> Self { + Self { suffix, prefix } + } + + /// Returns the suffix of the asset ID. + pub fn suffix(&self) -> Felt { + self.suffix + } + + /// Returns the prefix of the asset ID. + pub fn prefix(&self) -> Felt { + self.prefix + } + + /// Returns `true` if both prefix and suffix are zero, `false` otherwise. 
+ pub fn is_empty(&self) -> bool { + self.prefix == Felt::ZERO && self.suffix == Felt::ZERO + } +} + +impl Display for AssetId { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.write_fmt(format_args!( + "0x{:016x}{:016x}", + self.prefix().as_canonical_u64(), + self.suffix().as_canonical_u64() + )) + } +} diff --git a/crates/miden-protocol/src/asset/vault/asset_witness.rs b/crates/miden-protocol/src/asset/vault/asset_witness.rs index 503b468d41..f289b3e92a 100644 --- a/crates/miden-protocol/src/asset/vault/asset_witness.rs +++ b/crates/miden-protocol/src/asset/vault/asset_witness.rs @@ -1,3 +1,4 @@ +use alloc::boxed::Box; use alloc::string::ToString; use miden_crypto::merkle::InnerNodeInfo; @@ -6,7 +7,13 @@ use miden_crypto::merkle::smt::{SmtLeaf, SmtProof}; use super::vault_key::AssetVaultKey; use crate::asset::Asset; use crate::errors::AssetError; -use crate::utils::serde::{Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A witness of an asset in an [`AssetVault`](super::AssetVault). /// @@ -23,17 +30,12 @@ impl AssetWitness { /// # Errors /// /// Returns an error if: - /// - any of the entries in the SMT leaf is not a valid asset. - /// - any of the entries' vault keys does not match the expected vault key of the asset. + /// - any of the key value pairs in the SMT leaf do not form a valid asset. pub fn new(smt_proof: SmtProof) -> Result { - for (vault_key, asset) in smt_proof.leaf().entries() { - let asset = Asset::try_from(asset)?; - if *vault_key != asset.vault_key().into() { - return Err(AssetError::AssetVaultKeyMismatch { - actual: *vault_key, - expected: asset.vault_key().into(), - }); - } + for (vault_key, asset_value) in smt_proof.leaf().entries() { + // This ensures that vault key and value are consistent. 
+ Asset::from_key_value_words(*vault_key, *asset_value) + .map_err(|err| AssetError::AssetWitnessInvalid(Box::new(err)))?; } Ok(Self(smt_proof)) @@ -72,8 +74,9 @@ impl AssetWitness { SmtLeaf::Multiple(kv_pairs) => kv_pairs, }; - entries.iter().map(|(_key, value)| { - Asset::try_from(value).expect("asset witness should track valid assets") + entries.iter().map(|(key, value)| { + Asset::from_key_value_words(*key, *value) + .expect("asset witness should track valid assets") }) } @@ -81,7 +84,7 @@ impl AssetWitness { pub fn authenticated_nodes(&self) -> impl Iterator + '_ { self.0 .path() - .authenticated_nodes(self.0.leaf().index().value(), self.0.leaf().hash()) + .authenticated_nodes(self.0.leaf().index().position(), self.0.leaf().hash()) .expect("leaf index is u64 and should be less than 2^SMT_DEPTH") } } @@ -93,15 +96,13 @@ impl From for SmtProof { } impl Serializable for AssetWitness { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { self.0.write_into(target); } } impl Deserializable for AssetWitness { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let proof = SmtProof::read_from(source)?; Self::new(proof).map_err(|err| DeserializationError::InvalidValue(err.to_string())) } @@ -132,7 +133,9 @@ mod tests { let err = AssetWitness::new(proof).unwrap_err(); - assert_matches!(err, AssetError::InvalidFaucetAccountId(_)); + assert_matches!(err, AssetError::AssetWitnessInvalid(source) => { + assert_matches!(*source, AssetError::InvalidFaucetAccountId(_)); + }); Ok(()) } @@ -144,15 +147,16 @@ mod tests { let fungible_asset = FungibleAsset::mock(500); let non_fungible_asset = NonFungibleAsset::mock(&[1]); - let smt = - Smt::with_entries([(fungible_asset.vault_key().into(), non_fungible_asset.into())])?; + let smt = Smt::with_entries([( + fungible_asset.vault_key().into(), + non_fungible_asset.to_value_word(), + )])?; let proof = smt.open(&fungible_asset.vault_key().into()); let err = 
AssetWitness::new(proof).unwrap_err(); - assert_matches!(err, AssetError::AssetVaultKeyMismatch { actual, expected } => { - assert_eq!(actual, fungible_asset.vault_key().into()); - assert_eq!(expected, non_fungible_asset.vault_key().into()); + assert_matches!(err, AssetError::AssetWitnessInvalid(source) => { + assert_matches!(*source, AssetError::FungibleAssetValueMostSignificantElementsMustBeZero(_)); }); Ok(()) diff --git a/crates/miden-protocol/src/asset/vault/mod.rs b/crates/miden-protocol/src/asset/vault/mod.rs index b74c9abfc4..e9ea206196 100644 --- a/crates/miden-protocol/src/asset/vault/mod.rs +++ b/crates/miden-protocol/src/asset/vault/mod.rs @@ -1,11 +1,12 @@ use alloc::string::ToString; +use alloc::vec::Vec; use miden_crypto::merkle::InnerNodeInfo; -use miden_processor::SMT_DEPTH; use super::{ AccountType, Asset, + AssetAmount, ByteReader, ByteWriter, Deserializable, @@ -16,7 +17,7 @@ use super::{ }; use crate::Word; use crate::account::{AccountId, AccountVaultDelta, NonFungibleDeltaAction}; -use crate::crypto::merkle::smt::Smt; +use crate::crypto::merkle::smt::{SMT_DEPTH, Smt}; use crate::errors::AssetVaultError; mod partial; @@ -28,6 +29,9 @@ pub use asset_witness::AssetWitness; mod vault_key; pub use vault_key::AssetVaultKey; +mod asset_id; +pub use asset_id::AssetId; + // ASSET VAULT // ================================================================================================ @@ -61,7 +65,7 @@ impl AssetVault { pub fn new(assets: &[Asset]) -> Result { Ok(Self { asset_tree: Smt::with_entries( - assets.iter().map(|asset| (asset.vault_key().into(), (*asset).into())), + assets.iter().map(|asset| (asset.vault_key().to_word(), asset.to_value_word())), ) .map_err(AssetVaultError::DuplicateAsset)?, }) @@ -78,19 +82,22 @@ impl AssetVault { /// Returns the asset corresponding to the provided asset vault key, or `None` if the asset /// doesn't exist. 
pub fn get(&self, asset_vault_key: AssetVaultKey) -> Option { - let word = self.asset_tree.get_value(asset_vault_key.as_word()); + let asset_value = self.asset_tree.get_value(&asset_vault_key.to_word()); - if word.is_empty() { + if asset_value.is_empty() { None } else { - Some(Asset::try_from(word).expect("asset vault should only store valid assets")) + Some( + Asset::from_key_value(asset_vault_key, asset_value) + .expect("asset vault should only store valid assets"), + ) } } /// Returns true if the specified non-fungible asset is stored in this vault. pub fn has_non_fungible_asset(&self, asset: NonFungibleAsset) -> Result { // check if the asset is stored in the vault - match self.asset_tree.get_value(&asset.vault_key().into()) { + match self.asset_tree.get_value(&asset.vault_key().to_word()) { asset if asset == Smt::EMPTY_VALUE => Ok(false), _ => Ok(true), } @@ -106,21 +113,22 @@ impl AssetVault { return Err(AssetVaultError::NotAFungibleFaucetId(faucet_id)); } - // if the tree value is [0, 0, 0, 0], the asset is not stored in the vault - match self.asset_tree.get_value( - &AssetVaultKey::from_account_id(faucet_id) - .expect("faucet ID should be of type fungible") - .into(), - ) { - asset if asset == Smt::EMPTY_VALUE => Ok(0), - asset => Ok(FungibleAsset::new_unchecked(asset).amount()), - } + let vault_key = + AssetVaultKey::new_fungible(faucet_id).expect("faucet ID should be of type fungible"); + let asset_value = self.asset_tree.get_value(&vault_key.to_word()); + let asset = FungibleAsset::from_key_value(vault_key, asset_value) + .expect("asset vault should only store valid assets"); + + Ok(asset.amount().inner()) } /// Returns an iterator over the assets stored in the vault. pub fn assets(&self) -> impl Iterator + '_ { // SAFETY: The asset tree tracks only valid assets. 
- self.asset_tree.entries().map(|(_key, value)| Asset::new_unchecked(*value)) + self.asset_tree.entries().map(|(key, value)| { + Asset::from_key_value_words(*key, *value) + .expect("asset vault should only store valid assets") + }) } /// Returns an iterator over the inner nodes of the underlying [`Smt`]. @@ -132,7 +140,7 @@ impl AssetVault { /// /// The `vault_key` can be obtained with [`Asset::vault_key`]. pub fn open(&self, vault_key: AssetVaultKey) -> AssetWitness { - let smt_proof = self.asset_tree.open(&vault_key.into()); + let smt_proof = self.asset_tree.open(&vault_key.to_word()); // SAFETY: The asset vault should only contain valid assets. AssetWitness::new_unchecked(smt_proof) } @@ -165,16 +173,19 @@ impl AssetVault { /// /// # Errors /// Returns an error: - /// - If the total value of assets is greater than or equal to 2^63. + /// - If the total value of the added assets is greater than [`FungibleAsset::MAX_AMOUNT`]. /// - If the delta contains an addition/subtraction for a fungible asset that is not stored in /// the vault. /// - If the delta contains a non-fungible asset removal that is not stored in the vault. /// - If the delta contains a non-fungible asset addition that is already stored in the vault. /// - The maximum number of leaves per asset is exceeded. 
pub fn apply_delta(&mut self, delta: &AccountVaultDelta) -> Result<(), AssetVaultError> { - for (&faucet_id, &delta) in delta.fungible().iter() { - let asset = FungibleAsset::new(faucet_id, delta.unsigned_abs()) - .expect("Not a fungible faucet ID or delta is too large"); + for (vault_key, &delta) in delta.fungible().iter() { + // SAFETY: fungible asset delta should only contain fungible faucet IDs and delta amount + // should be in bounds + let asset = FungibleAsset::new(vault_key.faucet_id(), delta.unsigned_abs()) + .expect("fungible asset delta should be valid") + .with_callbacks(vault_key.callback_flag()); match delta >= 0 { true => self.add_fungible_asset(asset), false => self.remove_fungible_asset(asset), @@ -183,9 +194,13 @@ impl AssetVault { for (&asset, &action) in delta.non_fungible().iter() { match action { - NonFungibleDeltaAction::Add => self.add_non_fungible_asset(asset), - NonFungibleDeltaAction::Remove => self.remove_non_fungible_asset(asset), - }?; + NonFungibleDeltaAction::Add => { + self.add_non_fungible_asset(asset)?; + }, + NonFungibleDeltaAction::Remove => { + self.remove_non_fungible_asset(asset)?; + }, + } } Ok(()) @@ -196,7 +211,7 @@ impl AssetVault { /// Add the specified asset to the vault. /// /// # Errors - /// - If the total value of two fungible assets is greater than or equal to 2^63. + /// - If the total value of the added assets is greater than [`FungibleAsset::MAX_AMOUNT`]. /// - If the vault already contains the same non-fungible asset. /// - The maximum number of leaves per asset is exceeded. pub fn add_asset(&mut self, asset: Asset) -> Result { @@ -210,26 +225,26 @@ impl AssetVault { /// issued by the same faucet, the amounts are added together. /// /// # Errors - /// - If the total value of assets is greater than or equal to 2^63. + /// - If the total value of the added assets is greater than [`FungibleAsset::MAX_AMOUNT`]. /// - The maximum number of leaves per asset is exceeded. 
fn add_fungible_asset( &mut self, - asset: FungibleAsset, + other_asset: FungibleAsset, ) -> Result { - // fetch current asset value from the tree and add the new asset to it. - let new: FungibleAsset = match self.asset_tree.get_value(&asset.vault_key().into()) { - current if current == Smt::EMPTY_VALUE => asset, - current => { - let current = FungibleAsset::new_unchecked(current); - current.add(asset).map_err(AssetVaultError::AddFungibleAssetBalanceError)? - }, - }; + let current_asset_value = self.asset_tree.get_value(&other_asset.vault_key().to_word()); + let current_asset = + FungibleAsset::from_key_value(other_asset.vault_key(), current_asset_value) + .expect("asset vault should store valid assets"); + + let new_asset = current_asset + .add(other_asset) + .map_err(AssetVaultError::AddFungibleAssetBalanceError)?; + self.asset_tree - .insert(new.vault_key().into(), new.into()) + .insert(new_asset.vault_key().to_word(), new_asset.to_value_word()) .map_err(AssetVaultError::MaxLeafEntriesExceeded)?; - // return the new asset - Ok(new) + Ok(new_asset) } /// Add the specified non-fungible asset to the vault. @@ -244,7 +259,7 @@ impl AssetVault { // add non-fungible asset to the vault let old = self .asset_tree - .insert(asset.vault_key().into(), asset.into()) + .insert(asset.vault_key().to_word(), asset.to_value_word()) .map_err(AssetVaultError::MaxLeafEntriesExceeded)?; // if the asset already exists, return an error @@ -257,27 +272,32 @@ impl AssetVault { // REMOVE ASSET // -------------------------------------------------------------------------------------------- - /// Remove the specified asset from the vault and returns the asset that was just removed. + /// Remove the specified asset from the vault and returns the remaining asset, if any. + /// + /// - For fungible assets, returns `Some(Asset::Fungible(remaining))` with the remaining balance + /// (which may have amount 0). 
+ /// - For non-fungible assets, returns `None` since non-fungible assets are either fully present + /// or absent. /// /// # Errors /// - The fungible asset is not found in the vault. /// - The amount of the fungible asset in the vault is less than the amount to be removed. /// - The non-fungible asset is not found in the vault. - pub fn remove_asset(&mut self, asset: Asset) -> Result { + pub fn remove_asset(&mut self, asset: Asset) -> Result, AssetVaultError> { match asset { Asset::Fungible(asset) => { - let asset = self.remove_fungible_asset(asset)?; - Ok(Asset::Fungible(asset)) + let remaining = self.remove_fungible_asset(asset)?; + Ok(Some(Asset::Fungible(remaining))) }, Asset::NonFungible(asset) => { - let asset = self.remove_non_fungible_asset(asset)?; - Ok(Asset::NonFungible(asset)) + self.remove_non_fungible_asset(asset)?; + Ok(None) }, } } - /// Remove the specified fungible asset from the vault and returns the asset that was just - /// removed. If the final amount of the asset is zero, the asset is removed from the vault. + /// Remove the specified fungible asset from the vault and returns the remaining fungible + /// asset. If the final amount of the asset is zero, the asset is removed from the vault. /// /// # Errors /// - The asset is not found in the vault. @@ -285,34 +305,40 @@ impl AssetVault { /// - The maximum number of leaves per asset is exceeded. fn remove_fungible_asset( &mut self, - asset: FungibleAsset, + other_asset: FungibleAsset, ) -> Result { - // fetch the asset from the vault. - let new: FungibleAsset = match self.asset_tree.get_value(&asset.vault_key().into()) { - current if current == Smt::EMPTY_VALUE => { - return Err(AssetVaultError::FungibleAssetNotFound(asset)); - }, - current => { - let current = FungibleAsset::new_unchecked(current); - current.sub(asset).map_err(AssetVaultError::SubtractFungibleAssetBalanceError)? 
- }, - }; + let current_asset_value = self.asset_tree.get_value(&other_asset.vault_key().to_word()); + let current_asset = + FungibleAsset::from_key_value(other_asset.vault_key(), current_asset_value) + .expect("asset vault should store valid assets"); + + // If the asset's amount is 0, we consider it absent from the vault. + if current_asset.amount() == AssetAmount::ZERO { + return Err(AssetVaultError::FungibleAssetNotFound(other_asset)); + } + + let new_asset = current_asset + .sub(other_asset) + .map_err(AssetVaultError::SubtractFungibleAssetBalanceError)?; + + // Note that if new_asset's amount is 0, its value's word representation is equal to + // the empty word, which results in the removal of the entire entry from the corresponding + // leaf. + #[cfg(debug_assertions)] + { + if new_asset.amount() == AssetAmount::ZERO { + assert!(new_asset.to_value_word().is_empty()) + } + } - // if the amount of the asset is zero, remove the asset from the vault. - let value = match new.amount() { - 0 => Smt::EMPTY_VALUE, - _ => new.into(), - }; self.asset_tree - .insert(new.vault_key().into(), value) + .insert(new_asset.vault_key().to_word(), new_asset.to_value_word()) .map_err(AssetVaultError::MaxLeafEntriesExceeded)?; - // return the asset that was removed. - Ok(asset) + Ok(new_asset) } - /// Remove the specified non-fungible asset from the vault and returns the asset that was just - /// removed. + /// Remove the specified non-fungible asset from the vault. /// /// # Errors /// - The non-fungible asset is not found in the vault. @@ -320,11 +346,11 @@ impl AssetVault { fn remove_non_fungible_asset( &mut self, asset: NonFungibleAsset, - ) -> Result { + ) -> Result<(), AssetVaultError> { // remove the asset from the vault. let old = self .asset_tree - .insert(asset.vault_key().into(), Smt::EMPTY_VALUE) + .insert(asset.vault_key().to_word(), Smt::EMPTY_VALUE) .map_err(AssetVaultError::MaxLeafEntriesExceeded)?; // return an error if the asset did not exist in the vault. 
@@ -332,8 +358,7 @@ impl AssetVault { return Err(AssetVaultError::NonFungibleAssetNotFound(asset)); } - // return the asset that was removed. - Ok(asset) + Ok(()) } } @@ -365,7 +390,24 @@ impl Serializable for AssetVault { impl Deserializable for AssetVault { fn read_from(source: &mut R) -> Result { let num_assets = source.read_usize()?; - let assets = source.read_many::(num_assets)?; + let assets = source.read_many_iter::(num_assets)?.collect::, _>>()?; Self::new(&assets).map_err(|err| DeserializationError::InvalidValue(err.to_string())) } } + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + + #[test] + fn vault_fails_on_absent_fungible_asset() { + let mut vault = AssetVault::default(); + let err = vault.remove_asset(FungibleAsset::mock(50)).unwrap_err(); + assert_matches!(err, AssetVaultError::FungibleAssetNotFound(_)); + } +} diff --git a/crates/miden-protocol/src/asset/vault/partial.rs b/crates/miden-protocol/src/asset/vault/partial.rs index 1427a8902c..970d3c8508 100644 --- a/crates/miden-protocol/src/asset/vault/partial.rs +++ b/crates/miden-protocol/src/asset/vault/partial.rs @@ -7,7 +7,13 @@ use super::{AssetVault, AssetVaultKey}; use crate::Word; use crate::asset::{Asset, AssetWitness}; use crate::errors::PartialAssetVaultError; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A partial representation of an [`AssetVault`], containing only proofs for a subset of assets. /// @@ -98,13 +104,16 @@ impl PartialVault { /// Returns an error if: /// - the key is not tracked by this partial SMT. 
pub fn get(&self, vault_key: AssetVaultKey) -> Result, MerkleError> { - self.partial_smt.get_value(&vault_key.into()).map(|word| { - if word.is_empty() { + self.partial_smt.get_value(&vault_key.into()).map(|asset_value| { + if asset_value.is_empty() { None } else { // SAFETY: If this returned a non-empty word, then it should be a valid asset, // because the vault should only track valid ones. - Some(Asset::try_from(word).expect("partial vault should only track valid assets")) + Some( + Asset::from_key_value(vault_key, asset_value) + .expect("partial vault should only track valid assets"), + ) } }) } @@ -136,17 +145,11 @@ impl PartialVault { fn validate_entries<'a>( entries: impl IntoIterator, ) -> Result<(), PartialAssetVaultError> { - for (vault_key, asset) in entries { - let asset = Asset::try_from(asset).map_err(|source| { - PartialAssetVaultError::InvalidAssetInSmt { entry: *asset, source } + for (vault_key, asset_value) in entries { + // This ensures that vault key and value are consistent. 
+ Asset::from_key_value_words(*vault_key, *asset_value).map_err(|source| { + PartialAssetVaultError::InvalidAssetInSmt { entry: *asset_value, source } })?; - - if *vault_key != asset.vault_key().into() { - return Err(PartialAssetVaultError::AssetVaultKeyMismatch { - expected: asset.vault_key(), - actual: *vault_key, - }); - } } Ok(()) @@ -216,15 +219,12 @@ mod tests { fn partial_vault_ensures_asset_vault_key_matches() -> anyhow::Result<()> { let asset = FungibleAsset::mock(500); let invalid_vault_key = Word::from([0, 1, 2, 3u32]); - let smt = Smt::with_entries([(invalid_vault_key, asset.into())])?; + let smt = Smt::with_entries([(invalid_vault_key, asset.to_value_word())])?; let proof = smt.open(&invalid_vault_key); let partial_smt = PartialSmt::from_proofs([proof.clone()])?; let err = PartialVault::try_from(partial_smt).unwrap_err(); - assert_matches!(err, PartialAssetVaultError::AssetVaultKeyMismatch { expected, actual } => { - assert_eq!(actual, invalid_vault_key); - assert_eq!(expected, asset.vault_key()); - }); + assert_matches!(err, PartialAssetVaultError::InvalidAssetInSmt { .. 
}); Ok(()) } diff --git a/crates/miden-protocol/src/asset/vault/vault_key.rs b/crates/miden-protocol/src/asset/vault/vault_key.rs index 1d3d2a6914..290d7d6ba3 100644 --- a/crates/miden-protocol/src/asset/vault/vault_key.rs +++ b/crates/miden-protocol/src/asset/vault/vault_key.rs @@ -1,110 +1,216 @@ +use alloc::boxed::Box; +use alloc::string::ToString; use core::fmt; +use miden_core::LexicographicWord; use miden_crypto::merkle::smt::LeafIndex; -use miden_processor::SMT_DEPTH; -use crate::Word; -use crate::account::AccountType::FungibleFaucet; -use crate::account::{AccountId, AccountIdPrefix}; -use crate::asset::{Asset, FungibleAsset, NonFungibleAsset}; +use crate::account::AccountId; +use crate::account::AccountType::{self}; +use crate::asset::vault::AssetId; +use crate::asset::{Asset, AssetCallbackFlag, FungibleAsset, NonFungibleAsset}; +use crate::crypto::merkle::smt::SMT_DEPTH; +use crate::errors::AssetError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Word}; -/// The key of an [`Asset`] in the asset vault. +/// The unique identifier of an [`Asset`] in the [`AssetVault`](crate::asset::AssetVault). /// -/// The layout of an asset key is: -/// - Fungible asset key: `[0, 0, faucet_id_suffix, faucet_id_prefix]`. -/// - Non-fungible asset key: `[faucet_id_prefix, hash1, hash2, hash0']`, where `hash0'` is -/// equivalent to `hash0` with the fungible bit set to `0`. See [`NonFungibleAsset::vault_key`] -/// for more details. -/// -/// For details on the layout of an asset, see the documentation of [`Asset`]. -/// -/// ## Guarantees -/// -/// This type guarantees that it contains a valid fungible or non-fungible asset key: -/// - For fungible assets -/// - The felt at index 3 has the fungible bit set to 1 and it is a valid account ID prefix. -/// - The felt at index 2 is a valid account ID suffix. 
-/// - For non-fungible assets -/// - The felt at index 3 has the fungible bit set to 0. -/// - The felt at index 0 is a valid account ID prefix. -/// -/// The fungible bit is the bit in the [`AccountId`] that encodes whether the ID is a faucet. -#[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)] -pub struct AssetVaultKey(Word); +/// Its [`Word`] layout is: +/// ```text +/// [ +/// asset_id_suffix (64 bits), +/// asset_id_prefix (64 bits), +/// [faucet_id_suffix (56 bits) | 7 zero bits | callbacks_enabled (1 bit)], +/// faucet_id_prefix (64 bits) +/// ] +/// ``` +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub struct AssetVaultKey { + /// The asset ID of the vault key. + asset_id: AssetId, + + /// The ID of the faucet that issued the asset. + faucet_id: AccountId, + + /// Determines whether callbacks are enabled. + callback_flag: AssetCallbackFlag, +} impl AssetVaultKey { - /// Creates a new [`AssetVaultKey`] from the given [`Word`] **without performing validation**. + /// The serialized size of an [`AssetVaultKey`] in bytes. + /// + /// Serialized as its [`Word`] representation (4 field elements). + pub const SERIALIZED_SIZE: usize = Word::SERIALIZED_SIZE; + + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates an [`AssetVaultKey`] for a native asset with callbacks disabled. /// - /// ## Warning + /// # Errors /// - /// This function **does not check** whether the provided `Word` represents a valid - /// fungible or non-fungible asset key. 
- pub fn new_unchecked(value: Word) -> Self { - Self(value) + /// Returns an error if: + /// - the provided ID is not of type + /// [`AccountType::FungibleFaucet`](crate::account::AccountType::FungibleFaucet) or + /// [`AccountType::NonFungibleFaucet`](crate::account::AccountType::NonFungibleFaucet) + /// - the asset ID limbs are not zero when `faucet_id` is of type + /// [`AccountType::FungibleFaucet`](crate::account::AccountType::FungibleFaucet). + pub fn new_native(asset_id: AssetId, faucet_id: AccountId) -> Result { + Self::new(asset_id, faucet_id, AssetCallbackFlag::Disabled) } - /// Returns an [`AccountIdPrefix`] from the asset key. - pub fn faucet_id_prefix(&self) -> AccountIdPrefix { - if self.is_fungible() { - AccountIdPrefix::new_unchecked(self.0[3]) - } else { - AccountIdPrefix::new_unchecked(self.0[0]) + /// Creates an [`AssetVaultKey`] from its parts with the given [`AssetCallbackFlag`]. + /// + /// # Errors + /// + /// Returns an error if: + /// - the provided ID is not of type + /// [`AccountType::FungibleFaucet`](crate::account::AccountType::FungibleFaucet) or + /// [`AccountType::NonFungibleFaucet`](crate::account::AccountType::NonFungibleFaucet) + /// - the asset ID limbs are not zero when `faucet_id` is of type + /// [`AccountType::FungibleFaucet`](crate::account::AccountType::FungibleFaucet). + pub fn new( + asset_id: AssetId, + faucet_id: AccountId, + callback_flag: AssetCallbackFlag, + ) -> Result { + if !faucet_id.is_faucet() { + return Err(AssetError::InvalidFaucetAccountId(Box::from(format!( + "expected account ID of type faucet, found account type {}", + faucet_id.account_type() + )))); } - } - /// Returns the [`AccountId`] from the asset key if it is a fungible asset, `None` otherwise. 
- pub fn faucet_id(&self) -> Option { - if self.is_fungible() { - Some(AccountId::new_unchecked([self.0[3], self.0[2]])) - } else { - None + if matches!(faucet_id.account_type(), AccountType::FungibleFaucet) && !asset_id.is_empty() { + return Err(AssetError::FungibleAssetIdMustBeZero(asset_id)); } + + Ok(Self { asset_id, faucet_id, callback_flag }) } - /// Returns the leaf index of a vault key. - pub fn to_leaf_index(&self) -> LeafIndex { - LeafIndex::::from(self.0) + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the word representation of the vault key. + /// + /// See the type-level documentation for details. + pub fn to_word(&self) -> Word { + let faucet_suffix = self.faucet_id.suffix().as_canonical_u64(); + // The lower 8 bits of the faucet suffix are guaranteed to be zero and so it is used to + // encode the asset metadata. + debug_assert!(faucet_suffix & 0xff == 0, "lower 8 bits of faucet suffix must be zero"); + let faucet_id_suffix_and_metadata = faucet_suffix | self.callback_flag.as_u8() as u64; + let faucet_id_suffix_and_metadata = Felt::try_from(faucet_id_suffix_and_metadata) + .expect("highest bit should still be zero resulting in a valid felt"); + + Word::new([ + self.asset_id.suffix(), + self.asset_id.prefix(), + faucet_id_suffix_and_metadata, + self.faucet_id.prefix().as_felt(), + ]) + } + + /// Returns the [`AssetId`] of the vault key that distinguishes different assets issued by the + /// same faucet. + pub fn asset_id(&self) -> AssetId { + self.asset_id + } + + /// Returns the [`AccountId`] of the faucet that issued the asset. + pub fn faucet_id(&self) -> AccountId { + self.faucet_id + } + + /// Returns the [`AssetCallbackFlag`] flag of the vault key. + pub fn callback_flag(&self) -> AssetCallbackFlag { + self.callback_flag } /// Constructs a fungible asset's key from a faucet ID. 
/// /// Returns `None` if the provided ID is not of type /// [`AccountType::FungibleFaucet`](crate::account::AccountType::FungibleFaucet) - pub fn from_account_id(faucet_id: AccountId) -> Option { - match faucet_id.account_type() { - FungibleFaucet => { - let mut key = Word::empty(); - key[2] = faucet_id.suffix(); - key[3] = faucet_id.prefix().as_felt(); - Some(AssetVaultKey::new_unchecked(key)) - }, - _ => None, + pub fn new_fungible(faucet_id: AccountId) -> Option { + if matches!(faucet_id.account_type(), AccountType::FungibleFaucet) { + let asset_id = AssetId::new(Felt::ZERO, Felt::ZERO); + Some( + Self::new_native(asset_id, faucet_id) + .expect("we should have account type fungible faucet"), + ) + } else { + None } } - /// Returns a reference to the inner [Word] of this key. - pub fn as_word(&self) -> &Word { - &self.0 + /// Returns the leaf index of a vault key. + pub fn to_leaf_index(&self) -> LeafIndex { + LeafIndex::::from(self.to_word()) + } +} + +// CONVERSIONS +// ================================================================================================ + +impl From for Word { + fn from(vault_key: AssetVaultKey) -> Self { + vault_key.to_word() } +} - /// Returns `true` if the asset key is for a fungible asset, `false` otherwise. - fn is_fungible(&self) -> bool { - self.0[0].as_int() == 0 && self.0[1].as_int() == 0 +impl Ord for AssetVaultKey { + /// Implements comparison based on [`LexicographicWord`]. 
+ fn cmp(&self, other: &Self) -> core::cmp::Ordering { + LexicographicWord::new(self.to_word()).cmp(&LexicographicWord::new(other.to_word())) } } -impl fmt::Display for AssetVaultKey { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.0) +impl PartialOrd for AssetVaultKey { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) } } -// CONVERSIONS -// ================================================================================================ +impl TryFrom for AssetVaultKey { + type Error = AssetError; -impl From for Word { - fn from(vault_key: AssetVaultKey) -> Self { - vault_key.0 + /// Attempts to convert the provided [`Word`] into an [`AssetVaultKey`]. + /// + /// # Errors + /// + /// Returns an error if: + /// - the faucet ID in the key is invalid or not of a faucet type. + /// - the asset ID limbs are not zero when `faucet_id` is of type + /// [`AccountType::FungibleFaucet`](crate::account::AccountType::FungibleFaucet). + fn try_from(key: Word) -> Result { + let asset_id_suffix = key[0]; + let asset_id_prefix = key[1]; + let faucet_id_suffix_and_metadata = key[2]; + let faucet_id_prefix = key[3]; + + let raw = faucet_id_suffix_and_metadata.as_canonical_u64(); + let callback_flag = AssetCallbackFlag::try_from((raw & 0xff) as u8)?; + let faucet_id_suffix = Felt::try_from(raw & 0xffff_ffff_ffff_ff00) + .expect("clearing lower bits should not produce an invalid felt"); + + let asset_id = AssetId::new(asset_id_suffix, asset_id_prefix); + let faucet_id = AccountId::try_from_elements(faucet_id_suffix, faucet_id_prefix) + .map_err(|err| AssetError::InvalidFaucetAccountId(Box::new(err)))?; + + Self::new(asset_id, faucet_id, callback_flag) + } +} + +impl fmt::Display for AssetVaultKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.to_word().to_hex()) } } @@ -126,56 +232,63 @@ impl From for AssetVaultKey { } } +// SERIALIZATION +// 
================================================================================================ + +impl Serializable for AssetVaultKey { + fn write_into(&self, target: &mut W) { + self.to_word().write_into(target); + } + + fn get_size_hint(&self) -> usize { + Self::SERIALIZED_SIZE + } +} + +impl Deserializable for AssetVaultKey { + fn read_from(source: &mut R) -> Result { + let word: Word = source.read()?; + Self::try_from(word).map_err(|err| DeserializationError::InvalidValue(err.to_string())) + } +} + // TESTS // ================================================================================================ #[cfg(test)] mod tests { - use miden_core::Felt; - use super::*; - use crate::account::{AccountIdVersion, AccountStorageMode, AccountType}; - - fn make_non_fungible_key(prefix: u64) -> AssetVaultKey { - let word = [Felt::new(prefix), Felt::new(11), Felt::new(22), Felt::new(33)].into(); - AssetVaultKey::new_unchecked(word) - } + use crate::asset::AssetCallbackFlag; + use crate::testing::account_id::{ + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, + ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, + }; #[test] - fn test_faucet_id_for_fungible_asset() { - let id = AccountId::dummy( - [0xff; 15], - AccountIdVersion::Version0, - AccountType::FungibleFaucet, - AccountStorageMode::Public, - ); + fn asset_vault_key_word_roundtrip() -> anyhow::Result<()> { + let fungible_faucet = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET)?; + let nonfungible_faucet = AccountId::try_from(ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET)?; - let key = - AssetVaultKey::from_account_id(id).expect("Expected AssetVaultKey for FungibleFaucet"); + for callback_flag in [AssetCallbackFlag::Disabled, AssetCallbackFlag::Enabled] { + // Fungible: asset_id must be zero. 
+ let key = AssetVaultKey::new(AssetId::default(), fungible_faucet, callback_flag)?; - // faucet_id_prefix() should match AccountId prefix - assert_eq!(key.faucet_id_prefix(), id.prefix()); + let roundtripped = AssetVaultKey::try_from(key.to_word())?; + assert_eq!(key, roundtripped); + assert_eq!(key, AssetVaultKey::read_from_bytes(&key.to_bytes())?); - // faucet_id() should return the same account id - assert_eq!(key.faucet_id().unwrap(), id); - } + // Non-fungible: asset_id can be non-zero. + let key = AssetVaultKey::new( + AssetId::new(Felt::from(42u32), Felt::from(99u32)), + nonfungible_faucet, + callback_flag, + )?; - #[test] - fn test_faucet_id_for_non_fungible_asset() { - let id = AccountId::dummy( - [0xff; 15], - AccountIdVersion::Version0, - AccountType::NonFungibleFaucet, - AccountStorageMode::Public, - ); - - let prefix_value = id.prefix().as_u64(); - let key = make_non_fungible_key(prefix_value); - - // faucet_id_prefix() should match AccountId prefix - assert_eq!(key.faucet_id_prefix(), id.prefix()); - - // faucet_id() should return the None - assert_eq!(key.faucet_id(), None); + let roundtripped = AssetVaultKey::try_from(key.to_word())?; + assert_eq!(key, roundtripped); + assert_eq!(key, AssetVaultKey::read_from_bytes(&key.to_bytes())?); + } + + Ok(()) } } diff --git a/crates/miden-protocol/src/batch/batch_id.rs b/crates/miden-protocol/src/batch/batch_id.rs index bcbb50e16a..b84769cbc8 100644 --- a/crates/miden-protocol/src/batch/batch_id.rs +++ b/crates/miden-protocol/src/batch/batch_id.rs @@ -5,7 +5,13 @@ use miden_protocol_macros::WordWrapper; use crate::account::AccountId; use crate::transaction::{ProvenTransaction, TransactionId}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Felt, Hasher, Word, ZERO}; // BATCH ID diff --git 
a/crates/miden-protocol/src/batch/input_output_note_tracker.rs b/crates/miden-protocol/src/batch/input_output_note_tracker.rs index 296cf021e6..3b90259899 100644 --- a/crates/miden-protocol/src/batch/input_output_note_tracker.rs +++ b/crates/miden-protocol/src/batch/input_output_note_tracker.rs @@ -250,7 +250,7 @@ impl InputOutputNoteTracker { // This could happen if the metadata of the notes is different, which we consider an // error. let input_commitment = input_note_header.commitment(); - let output_commitment = output_note.commitment(); + let output_commitment = output_note.to_commitment(); if output_commitment != input_commitment { return Err(InputOutputNoteTrackerError::NoteCommitmentMismatch { id, diff --git a/crates/miden-protocol/src/batch/note_tree.rs b/crates/miden-protocol/src/batch/note_tree.rs index 7897856389..e0aa847f01 100644 --- a/crates/miden-protocol/src/batch/note_tree.rs +++ b/crates/miden-protocol/src/batch/note_tree.rs @@ -3,7 +3,13 @@ use alloc::vec::Vec; use crate::crypto::merkle::MerkleError; use crate::crypto::merkle::smt::{LeafIndex, SimpleSmt}; use crate::note::{NoteId, NoteMetadata, compute_note_commitment}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{BATCH_NOTE_TREE_DEPTH, EMPTY_WORD, Word}; /// Wrapper over [SimpleSmt] for batch note tree. 
diff --git a/crates/miden-protocol/src/batch/ordered_batches.rs b/crates/miden-protocol/src/batch/ordered_batches.rs index 00b04fcf9a..749707480c 100644 --- a/crates/miden-protocol/src/batch/ordered_batches.rs +++ b/crates/miden-protocol/src/batch/ordered_batches.rs @@ -2,7 +2,13 @@ use alloc::vec::Vec; use crate::batch::ProvenBatch; use crate::transaction::OrderedTransactionHeaders; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // ORDERED BATCHES // ================================================================================================ diff --git a/crates/miden-protocol/src/batch/proposed_batch.rs b/crates/miden-protocol/src/batch/proposed_batch.rs index 5d5cb0f97b..b0a96439ba 100644 --- a/crates/miden-protocol/src/batch/proposed_batch.rs +++ b/crates/miden-protocol/src/batch/proposed_batch.rs @@ -17,7 +17,13 @@ use crate::transaction::{ ProvenTransaction, TransactionHeader, }; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{MAX_ACCOUNTS_PER_BATCH, MAX_INPUT_NOTES_PER_BATCH, MAX_OUTPUT_NOTES_PER_BATCH}; /// A proposed batch of transactions with all necessary data to validate it. @@ -52,7 +58,8 @@ pub struct ProposedBatch { /// [`InputNoteCommitment::nullifier`]. input_notes: InputNotes, /// The output notes of this batch. This consists of all notes created by transactions in the - /// batch that are not consumed within the same batch. These are sorted by [`OutputNote::id`]. + /// batch that are not consumed within the same batch. These are sorted by + /// [`OutputNote::id`]. 
output_notes: Vec, } @@ -426,14 +433,15 @@ impl Deserializable for ProposedBatch { mod tests { use anyhow::Context; use miden_crypto::merkle::mmr::{Mmr, PartialMmr}; + use miden_crypto::rand::test_utils::rand_value; use miden_verifier::ExecutionProof; - use winter_rand_utils::rand_value; use super::*; use crate::Word; + use crate::account::delta::AccountUpdateDetails; use crate::account::{AccountIdVersion, AccountStorageMode, AccountType}; use crate::asset::FungibleAsset; - use crate::transaction::ProvenTransactionBuilder; + use crate::transaction::{InputNoteCommitment, OutputNote, ProvenTransaction, TxAccountUpdate}; #[test] fn proposed_batch_serialization() -> anyhow::Result<()> { @@ -474,18 +482,25 @@ mod tests { let expiration_block_num = reference_block_header.block_num() + 1; let proof = ExecutionProof::new_dummy(); - let tx = ProvenTransactionBuilder::new( + let account_update = TxAccountUpdate::new( account_id, initial_account_commitment, final_account_commitment, account_delta_commitment, + AccountUpdateDetails::Private, + ) + .context("failed to build account update")?; + + let tx = ProvenTransaction::new( + account_update, + Vec::::new(), + Vec::::new(), block_num, block_ref, FungibleAsset::mock(100).unwrap_fungible(), expiration_block_num, proof, ) - .build() .context("failed to build proven transaction")?; let batch = ProposedBatch::new( diff --git a/crates/miden-protocol/src/batch/proven_batch.rs b/crates/miden-protocol/src/batch/proven_batch.rs index 97075a8736..eb8aae5495 100644 --- a/crates/miden-protocol/src/batch/proven_batch.rs +++ b/crates/miden-protocol/src/batch/proven_batch.rs @@ -8,7 +8,13 @@ use crate::block::BlockNumber; use crate::errors::ProvenBatchError; use crate::note::Nullifier; use crate::transaction::{InputNoteCommitment, InputNotes, OrderedTransactionHeaders, OutputNote}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + 
Deserializable, + DeserializationError, + Serializable, +}; use crate::{MIN_PROOF_SECURITY_LEVEL, Word}; /// A transaction batch with an execution proof. diff --git a/crates/miden-protocol/src/block/account_tree/account_id_key.rs b/crates/miden-protocol/src/block/account_tree/account_id_key.rs new file mode 100644 index 0000000000..1974e866b5 --- /dev/null +++ b/crates/miden-protocol/src/block/account_tree/account_id_key.rs @@ -0,0 +1,156 @@ +use miden_crypto::merkle::smt::LeafIndex; + +use super::AccountId; +use crate::Word; +use crate::crypto::merkle::smt::SMT_DEPTH; +use crate::errors::AccountIdError; + +/// The account ID encoded as a key for use in AccountTree and advice maps in +/// `TransactionAdviceInputs`. +/// +/// Canonical word layout: +/// +/// [0, 0, suffix, prefix] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct AccountIdKey(AccountId); + +impl AccountIdKey { + // Indices in the word layout where the prefix and suffix are stored. + const KEY_SUFFIX_IDX: usize = 2; + const KEY_PREFIX_IDX: usize = 3; + + /// Create from AccountId + pub fn new(id: AccountId) -> Self { + Self(id) + } + + /// Returns the underlying AccountId + pub fn account_id(&self) -> AccountId { + self.0 + } + + // SMT WORD REPRESENTATION + //--------------------------------------------------------------------------------------------------- + + /// Returns `[0, 0, suffix, prefix]` + pub fn as_word(&self) -> Word { + let mut key = Word::empty(); + + key[Self::KEY_SUFFIX_IDX] = self.0.suffix(); + key[Self::KEY_PREFIX_IDX] = self.0.prefix().as_felt(); + + key + } + + /// Construct from SMT word representation. + /// + /// Validates structure before converting. 
+ pub fn try_from_word(word: Word) -> Result { + AccountId::try_from_elements(word[Self::KEY_SUFFIX_IDX], word[Self::KEY_PREFIX_IDX]) + } + + // LEAF INDEX + //--------------------------------------------------------------------------------------------------- + + /// Converts to SMT leaf index used by AccountTree + pub fn to_leaf_index(&self) -> LeafIndex { + LeafIndex::from(self.as_word()) + } +} + +impl From for AccountIdKey { + fn from(id: AccountId) -> Self { + Self(id) + } +} + +// TESTS +//--------------------------------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + + use miden_core::ZERO; + + use super::{AccountId, *}; + use crate::account::{AccountIdVersion, AccountStorageMode, AccountType}; + #[test] + fn test_as_word_layout() { + let id = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + let key = AccountIdKey::from(id); + let word = key.as_word(); + + assert_eq!(word[0], ZERO); + assert_eq!(word[1], ZERO); + assert_eq!(word[2], id.suffix()); + assert_eq!(word[3], id.prefix().as_felt()); + } + + #[test] + fn test_roundtrip_word_conversion() { + let id = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + + let key = AccountIdKey::from(id); + let recovered = + AccountIdKey::try_from_word(key.as_word()).expect("valid account id conversion"); + + assert_eq!(id, recovered); + } + + #[test] + fn test_leaf_index_consistency() { + let id = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + let key = AccountIdKey::from(id); + + let idx1 = key.to_leaf_index(); + let idx2 = key.to_leaf_index(); + + assert_eq!(idx1, idx2); + } + + #[test] + fn test_from_conversion() { + let id = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + 
AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + let key: AccountIdKey = id.into(); + + assert_eq!(key.account_id(), id); + } + + #[test] + fn test_multiple_roundtrips() { + for _ in 0..100 { + let id = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + let key = AccountIdKey::from(id); + + let recovered = + AccountIdKey::try_from_word(key.as_word()).expect("valid account id conversion"); + + assert_eq!(id, recovered); + } + } +} diff --git a/crates/miden-protocol/src/block/account_tree/backend.rs b/crates/miden-protocol/src/block/account_tree/backend.rs index 78dc989786..58963f0e44 100644 --- a/crates/miden-protocol/src/block/account_tree/backend.rs +++ b/crates/miden-protocol/src/block/account_tree/backend.rs @@ -1,7 +1,7 @@ use alloc::boxed::Box; use alloc::vec::Vec; -use super::{AccountId, AccountIdPrefix, AccountTree, AccountTreeError, account_id_to_smt_key}; +use super::{AccountId, AccountIdKey, AccountIdPrefix, AccountTree, AccountTreeError}; use crate::Word; use crate::crypto::merkle::MerkleError; #[cfg(feature = "std")] @@ -129,9 +129,7 @@ where type Error = MerkleError; fn num_leaves(&self) -> usize { - // LargeSmt::num_leaves returns Result - // We'll unwrap or return 0 on error - LargeSmt::num_leaves(self).map_err(large_smt_error_to_merkle_error).unwrap_or(0) + LargeSmt::num_leaves(self) } fn leaves<'a>(&'a self) -> Box, SmtLeaf)>> { @@ -205,7 +203,7 @@ impl AccountTree { let smt = Smt::with_entries( entries .into_iter() - .map(|(id, commitment)| (account_id_to_smt_key(id), commitment)), + .map(|(id, commitment)| (AccountIdKey::from(id).as_word(), commitment)), ) .map_err(|err| { let MerkleError::DuplicateValuesForIndex(leaf_idx) = err else { @@ -234,6 +232,13 @@ fn large_smt_error_to_merkle_error(err: LargeSmtError) -> MerkleError { LargeSmtError::Storage(storage_err) => { panic!("Storage error encountered: {:?}", storage_err) }, + 
LargeSmtError::StorageNotEmpty => { + panic!("StorageNotEmpty error encountered: {:?}", err) + }, LargeSmtError::Merkle(merkle_err) => merkle_err, + LargeSmtError::RootMismatch { expected, actual } => MerkleError::ConflictingRoots { + expected_root: expected, + actual_root: actual, + }, } } diff --git a/crates/miden-protocol/src/block/account_tree/mod.rs b/crates/miden-protocol/src/block/account_tree/mod.rs index eab69c32de..e594684be1 100644 --- a/crates/miden-protocol/src/block/account_tree/mod.rs +++ b/crates/miden-protocol/src/block/account_tree/mod.rs @@ -1,14 +1,18 @@ use alloc::string::ToString; use alloc::vec::Vec; -use miden_crypto::merkle::smt::LeafIndex; - use crate::Word; use crate::account::{AccountId, AccountIdPrefix}; use crate::crypto::merkle::MerkleError; use crate::crypto::merkle::smt::{MutationSet, SMT_DEPTH, Smt, SmtLeaf}; use crate::errors::AccountTreeError; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; mod partial; pub use partial::PartialAccountTree; @@ -19,39 +23,8 @@ pub use witness::AccountWitness; mod backend; pub use backend::AccountTreeBackend; -// FREE HELPER FUNCTIONS -// ================================================================================================ -// These module-level functions provide conversions between AccountIds and SMT keys. -// They avoid the need for awkward syntax like account_id_to_smt_key(). - -const KEY_SUFFIX_IDX: usize = 2; -const KEY_PREFIX_IDX: usize = 3; - -/// Converts an [`AccountId`] to an SMT key for use in account trees. -/// -/// The key is constructed with the account ID suffix at index 2 and prefix at index 3. 
-pub fn account_id_to_smt_key(account_id: AccountId) -> Word { - let mut key = Word::empty(); - key[KEY_SUFFIX_IDX] = account_id.suffix(); - key[KEY_PREFIX_IDX] = account_id.prefix().as_felt(); - key -} - -/// Recovers an [`AccountId`] from an SMT key. -/// -/// # Panics -/// -/// Panics if the key does not represent a valid account ID. This should never happen when used -/// with keys from account trees, as the tree only stores valid IDs. -pub fn smt_key_to_account_id(key: Word) -> AccountId { - AccountId::try_from([key[KEY_PREFIX_IDX], key[KEY_SUFFIX_IDX]]) - .expect("account tree should only contain valid IDs") -} - -/// Converts an AccountId to an SMT leaf index for use with MerkleStore operations. -pub fn account_id_to_smt_index(account_id: AccountId) -> LeafIndex { - account_id_to_smt_key(account_id).into() -} +mod account_id_key; +pub use account_id_key::AccountIdKey; // ACCOUNT TREE // ================================================================================================ @@ -104,7 +77,8 @@ where /// # Errors /// /// Returns an error if: - /// - The SMT contains duplicate account ID prefixes + /// - The SMT contains invalid account IDs. + /// - The SMT contains duplicate account ID prefixes. 
pub fn new(smt: S) -> Result { for (_leaf_idx, leaf) in smt.leaves() { match leaf { @@ -114,13 +88,19 @@ where }, SmtLeaf::Single((key, _)) => { // Single entry is good - verify it's a valid account ID - smt_key_to_account_id(key); + AccountIdKey::try_from_word(key).map_err(|err| { + AccountTreeError::InvalidAccountIdKey { key, source: err } + })?; }, SmtLeaf::Multiple(entries) => { // Multiple entries means duplicate prefixes // Extract one of the keys to identify the duplicate prefix if let Some((key, _)) = entries.first() { - let account_id = smt_key_to_account_id(*key); + let key = *key; + let account_id = AccountIdKey::try_from_word(key).map_err(|err| { + AccountTreeError::InvalidAccountIdKey { key, source: err } + })?; + return Err(AccountTreeError::DuplicateIdPrefix { duplicate_prefix: account_id.prefix(), }); @@ -158,7 +138,7 @@ where /// /// Panics if the SMT backend fails to open the leaf (only possible with `LargeSmt` backend). pub fn open(&self, account_id: AccountId) -> AccountWitness { - let key = account_id_to_smt_key(account_id); + let key = AccountIdKey::from(account_id).as_word(); let proof = self.smt.open(&key); AccountWitness::from_smt_proof(account_id, proof) @@ -166,7 +146,7 @@ where /// Returns the current state commitment of the given account ID. pub fn get(&self, account_id: AccountId) -> Word { - let key = account_id_to_smt_key(account_id); + let key = AccountIdKey::from(account_id).as_word(); self.smt.get_value(&key) } @@ -201,7 +181,7 @@ where ( // SAFETY: By construction, the tree only contains valid IDs. 
- AccountId::try_from([key[Self::KEY_PREFIX_IDX], key[Self::KEY_SUFFIX_IDX]]) + AccountId::try_from_elements(key[Self::KEY_SUFFIX_IDX], key[Self::KEY_PREFIX_IDX]) .expect("account tree should only contain valid IDs"), commitment, ) @@ -234,7 +214,7 @@ where .compute_mutations(Vec::from_iter( account_commitments .into_iter() - .map(|(id, commitment)| (account_id_to_smt_key(id), commitment)), + .map(|(id, commitment)| (AccountIdKey::from(id).as_word(), commitment)), )) .map_err(AccountTreeError::ComputeMutations)?; @@ -248,7 +228,9 @@ where // valid. If it does not match, then we would insert a duplicate. if existing_key != *id_key { return Err(AccountTreeError::DuplicateIdPrefix { - duplicate_prefix: smt_key_to_account_id(*id_key).prefix(), + duplicate_prefix: AccountIdKey::try_from_word(*id_key) + .expect("account tree should only contain valid IDs") + .prefix(), }); } }, @@ -281,7 +263,7 @@ where account_id: AccountId, state_commitment: Word, ) -> Result { - let key = account_id_to_smt_key(account_id); + let key = AccountIdKey::from(account_id).as_word(); // SAFETY: account tree should not contain multi-entry leaves and so the maximum number // of entries per leaf should never be exceeded. 
let prev_value = self.smt.insert(key, state_commitment) @@ -372,9 +354,10 @@ impl Deserializable for AccountTree { } // Create the SMT with validated entries - let smt = - Smt::with_entries(entries.into_iter().map(|(k, v)| (account_id_to_smt_key(k), v))) - .map_err(|err| DeserializationError::InvalidValue(err.to_string()))?; + let smt = Smt::with_entries( + entries.into_iter().map(|(k, v)| (AccountIdKey::from(k).as_word(), v)), + ) + .map_err(|err| DeserializationError::InvalidValue(err.to_string()))?; Ok(Self::new_unchecked(smt)) } } @@ -556,7 +539,7 @@ pub(super) mod tests { assert_eq!(tree.num_accounts(), 2); for id in [id0, id1] { - let proof = tree.smt.open(&account_id_to_smt_key(id)); + let proof = tree.smt.open(&AccountIdKey::from(id).as_word()); let (control_path, control_leaf) = proof.into_parts(); let witness = tree.open(id); @@ -600,7 +583,10 @@ pub(super) mod tests { // Create AccountTree with LargeSmt backend let tree = LargeSmt::::with_entries( MemoryStorage::default(), - [(account_id_to_smt_key(id0), digest0), (account_id_to_smt_key(id1), digest1)], + [ + (AccountIdKey::from(id0).as_word(), digest0), + (AccountIdKey::from(id1).as_word(), digest1), + ], ) .map(AccountTree::new_unchecked) .unwrap(); @@ -617,7 +603,10 @@ pub(super) mod tests { // Test mutations let mut tree_mut = LargeSmt::::with_entries( MemoryStorage::default(), - [(account_id_to_smt_key(id0), digest0), (account_id_to_smt_key(id1), digest1)], + [ + (AccountIdKey::from(id0).as_word(), digest0), + (AccountIdKey::from(id1).as_word(), digest1), + ], ) .map(AccountTree::new_unchecked) .unwrap(); @@ -666,7 +655,10 @@ pub(super) mod tests { let mut tree = LargeSmt::with_entries( MemoryStorage::default(), - [(account_id_to_smt_key(id0), digest0), (account_id_to_smt_key(id1), digest1)], + [ + (AccountIdKey::from(id0).as_word(), digest0), + (AccountIdKey::from(id1).as_word(), digest1), + ], ) .map(AccountTree::new_unchecked) .unwrap(); @@ -697,7 +689,10 @@ pub(super) mod tests { // Create tree 
with LargeSmt backend let large_tree = LargeSmt::with_entries( MemoryStorage::default(), - [(account_id_to_smt_key(id0), digest0), (account_id_to_smt_key(id1), digest1)], + [ + (AccountIdKey::from(id0).as_word(), digest0), + (AccountIdKey::from(id1).as_word(), digest1), + ], ) .map(AccountTree::new_unchecked) .unwrap(); diff --git a/crates/miden-protocol/src/block/account_tree/partial.rs b/crates/miden-protocol/src/block/account_tree/partial.rs index af84f725ae..4530f77cf0 100644 --- a/crates/miden-protocol/src/block/account_tree/partial.rs +++ b/crates/miden-protocol/src/block/account_tree/partial.rs @@ -1,6 +1,6 @@ use miden_crypto::merkle::smt::{PartialSmt, SmtLeaf}; -use super::{AccountWitness, account_id_to_smt_key}; +use super::{AccountIdKey, AccountWitness}; use crate::Word; use crate::account::AccountId; use crate::errors::AccountTreeError; @@ -68,7 +68,7 @@ impl PartialAccountTree { /// Returns an error if: /// - the account ID is not tracked by this account tree. pub fn open(&self, account_id: AccountId) -> Result { - let key = account_id_to_smt_key(account_id); + let key = AccountIdKey::from(account_id).as_word(); self.smt .open(&key) @@ -83,7 +83,7 @@ impl PartialAccountTree { /// Returns an error if: /// - the account ID is not tracked by this account tree. pub fn get(&self, account_id: AccountId) -> Result { - let key = account_id_to_smt_key(account_id); + let key = AccountIdKey::from(account_id).as_word(); self.smt .get_value(&key) .map_err(|source| AccountTreeError::UntrackedAccountId { id: account_id, source }) @@ -109,17 +109,22 @@ impl PartialAccountTree { /// witness. 
pub fn track_account(&mut self, witness: AccountWitness) -> Result<(), AccountTreeError> { let id_prefix = witness.id().prefix(); - let id_key = account_id_to_smt_key(witness.id()); + let id_key = AccountIdKey::from(witness.id()).as_word(); - // If a leaf with the same prefix is already tracked by this partial tree, consider it an + // If there exists a tracked leaf with a non-empty entry whose key differs from the one + // we're about to track, then two different account IDs share the same prefix, which is an // error. // - // We return an error even for empty leaves, because tracking the same ID prefix twice - // indicates that different IDs are attempted to be tracked. It would technically not - // violate the invariant of the tree that it only tracks zero or one entries per leaf, but - // since tracking the same ID twice should practically never happen, we return an error, out - // of an abundance of caution. - if self.smt.get_leaf(&id_key).is_ok() { + // Note that if the leaf is empty, that's fine: `PartialSmt::get_leaf` returns + // `Ok(SmtLeaf::Empty)` for any leaf position reachable through provably-empty subtrees, + // even if no proof was explicitly added for that position. In a sparse tree this covers + // most of the leaf space, so treating empty leaves as duplicates would reject nearly every + // second witness. + // + // Also note that the multiple variant cannot occur by construction of the account tree. 
+ if let Ok(SmtLeaf::Single((existing_key, _))) = self.smt.get_leaf(&id_key) + && id_key != existing_key + { return Err(AccountTreeError::DuplicateIdPrefix { duplicate_prefix: id_prefix }); } @@ -165,7 +170,7 @@ impl PartialAccountTree { account_id: AccountId, state_commitment: Word, ) -> Result { - let key = account_id_to_smt_key(account_id); + let key = AccountIdKey::from(account_id).as_word(); // If there exists a tracked leaf whose key is _not_ the one we're about to overwrite, then // we would insert the new commitment next to an existing account ID with the same prefix, @@ -195,6 +200,7 @@ mod tests { use super::*; use crate::block::account_tree::AccountTree; use crate::block::account_tree::tests::setup_duplicate_prefix_ids; + use crate::testing::account_id::AccountIdBuilder; #[test] fn insert_fails_on_duplicate_prefix() -> anyhow::Result<()> { @@ -252,15 +258,65 @@ mod tests { assert_eq!(partial_tree.get(id0).unwrap(), commitment1); } + /// Check that updating an account ID in the partial account tree fails if that ID is not + /// tracked. #[test] - fn upsert_state_commitments_fails_on_untracked_key() { - let mut partial_tree = PartialAccountTree::default(); - let [update, _] = setup_duplicate_prefix_ids(); + fn upsert_state_commitments_fails_on_untracked_key() -> anyhow::Result<()> { + let id0 = AccountIdBuilder::default().build_with_seed([5; 32]); + let id2 = AccountIdBuilder::default().build_with_seed([6; 32]); + + let commitment0 = Word::from([1, 2, 3, 4u32]); + let commitment2 = Word::from([2, 3, 4, 5u32]); - let err = partial_tree.upsert_state_commitments([update]).unwrap_err(); + let account_tree = AccountTree::with_entries([(id0, commitment0), (id2, commitment2)])?; + // Let the partial account tree only track id0, not id2. 
+ let mut partial_tree = PartialAccountTree::with_witnesses([account_tree.open(id0)])?; + + let err = partial_tree.upsert_state_commitments([(id2, commitment0)]).unwrap_err(); assert_matches!(err, AccountTreeError::UntrackedAccountId { id, .. } - if id == update.0 - ) + if id == id2 + ); + + Ok(()) + } + + /// Verifies that tracking multiple witnesses succeeds in a sparse tree, where most leaf + /// positions are reachable through provably-empty subtrees, including `SmtLeaf::Empty` + /// leaves that are provably empty but not actually occupied. + #[test] + fn track_succeeds_for_multiple_witnesses_in_sparse_tree() -> anyhow::Result<()> { + let id0 = AccountIdBuilder::default().build_with_seed([10; 32]); + let id1 = AccountIdBuilder::default().build_with_seed([11; 32]); + let id2 = AccountIdBuilder::default().build_with_seed([12; 32]); + + let commitment0 = Word::from([1, 2, 3, 4u32]); + let commitment1 = Word::from([5, 6, 7, 8u32]); + + // Create a tree with only one account (very sparse). + let account_tree = AccountTree::with_entries([(id0, commitment0)])?; + + // Get witnesses for one existing and two new (empty) accounts. + let witness0 = account_tree.open(id0); + let witness1 = account_tree.open(id1); + let witness2 = account_tree.open(id2); + + // Building a partial tree from all three witnesses should succeed: + // id1 and id2 have empty leaves that are provably empty via the sparse tree structure, + // but they are NOT duplicates of id0. + let mut partial_tree = + PartialAccountTree::with_witnesses([witness0, witness1.clone(), witness2])?; + + // Adding the same witness again should also succeed. + partial_tree.track_account(witness1)?; + + // Verify the existing account has its commitment. + assert_eq!(partial_tree.get(id0)?, commitment0); + + // We should be able to insert new state commitments for the new accounts. 
+ partial_tree.upsert_state_commitments([(id1, commitment1)])?; + assert_eq!(partial_tree.get(id1)?, commitment1); + + Ok(()) } #[test] @@ -269,14 +325,14 @@ mod tests { // account IDs with the same prefix. let full_tree = Smt::with_entries( setup_duplicate_prefix_ids() - .map(|(id, commitment)| (account_id_to_smt_key(id), commitment)), + .map(|(id, commitment)| (AccountIdKey::from(id).as_word(), commitment)), ) .unwrap(); let [(id0, _), (id1, _)] = setup_duplicate_prefix_ids(); - let key0 = account_id_to_smt_key(id0); - let key1 = account_id_to_smt_key(id1); + let key0 = AccountIdKey::from(id0).as_word(); + let key1 = AccountIdKey::from(id1).as_word(); let proof0 = full_tree.open(&key0); let proof1 = full_tree.open(&key1); assert_eq!(proof0.leaf(), proof1.leaf()); diff --git a/crates/miden-protocol/src/block/account_tree/witness.rs b/crates/miden-protocol/src/block/account_tree/witness.rs index 1001803a85..0b23b9f6ba 100644 --- a/crates/miden-protocol/src/block/account_tree/witness.rs +++ b/crates/miden-protocol/src/block/account_tree/witness.rs @@ -1,13 +1,19 @@ use alloc::string::ToString; -use miden_crypto::merkle::smt::{LeafIndex, SMT_DEPTH, SmtLeaf, SmtProof, SmtProofError}; +use miden_crypto::merkle::smt::{SMT_DEPTH, SmtLeaf, SmtProof, SmtProofError}; use miden_crypto::merkle::{InnerNodeInfo, SparseMerklePath}; use crate::Word; use crate::account::AccountId; -use crate::block::account_tree::{account_id_to_smt_key, smt_key_to_account_id}; +use crate::block::account_tree::AccountIdKey; use crate::errors::AccountTreeError; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // ACCOUNT WITNESS // ================================================================================================ @@ -68,6 +74,7 @@ impl AccountWitness { /// # Panics /// /// Panics if: + /// - the proof contains an entry 
whose key contains an invalid account ID. /// - the merkle path in the proof does not have depth equal to [`SMT_DEPTH`]. /// - the proof contains an SmtLeaf::Multiple. pub(super) fn from_smt_proof(requested_account_id: AccountId, proof: SmtProof) -> Self { @@ -83,7 +90,8 @@ impl AccountWitness { SmtLeaf::Empty(_) => requested_account_id, SmtLeaf::Single((key_in_leaf, _)) => { // SAFETY: By construction, the tree only contains valid IDs. - smt_key_to_account_id(*key_in_leaf) + AccountIdKey::try_from_word(*key_in_leaf) + .expect("account tree should only contain valid IDs") }, SmtLeaf::Multiple(_) => { unreachable!("account tree should only contain zero or one entry per ID prefix") @@ -91,7 +99,7 @@ impl AccountWitness { }; let commitment = proof - .get(&account_id_to_smt_key(witness_id)) + .get(&AccountIdKey::from(witness_id).as_word()) .expect("we should have received a proof for the witness key"); // SAFETY: The proof is guaranteed to have depth SMT_DEPTH if it comes from one of @@ -132,10 +140,10 @@ impl AccountWitness { /// Returns the [`SmtLeaf`] of the account witness. 
pub fn leaf(&self) -> SmtLeaf { if self.commitment == Word::empty() { - let leaf_idx = LeafIndex::from(account_id_to_smt_key(self.id)); + let leaf_idx = AccountIdKey::from(self.id).to_leaf_index(); SmtLeaf::new_empty(leaf_idx) } else { - let key = account_id_to_smt_key(self.id); + let key = AccountIdKey::from(self.id).as_word(); SmtLeaf::new_single(key, self.commitment) } } @@ -152,7 +160,7 @@ impl AccountWitness { pub fn authenticated_nodes(&self) -> impl Iterator + '_ { let leaf = self.leaf(); self.path() - .authenticated_nodes(leaf.index().value(), leaf.hash()) + .authenticated_nodes(leaf.index().position(), leaf.hash()) .expect("leaf index is u64 and should be less than 2^SMT_DEPTH") } } diff --git a/crates/miden-protocol/src/block/block_account_update.rs b/crates/miden-protocol/src/block/block_account_update.rs index d3e2541613..8b809151d6 100644 --- a/crates/miden-protocol/src/block/block_account_update.rs +++ b/crates/miden-protocol/src/block/block_account_update.rs @@ -1,7 +1,13 @@ use crate::Word; use crate::account::AccountId; use crate::account::delta::AccountUpdateDetails; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // BLOCK ACCOUNT UPDATE // ================================================================================================ diff --git a/crates/miden-protocol/src/block/block_body.rs b/crates/miden-protocol/src/block/block_body.rs index 53b86741cb..4b10460edd 100644 --- a/crates/miden-protocol/src/block/block_body.rs +++ b/crates/miden-protocol/src/block/block_body.rs @@ -1,13 +1,6 @@ use alloc::vec::Vec; use miden_core::Word; -use miden_core::utils::{ - ByteReader, - ByteWriter, - Deserializable, - DeserializationError, - Serializable, -}; use crate::block::{ BlockAccountUpdate, @@ -18,6 +11,13 @@ use crate::block::{ }; use crate::note::Nullifier; use 
crate::transaction::{OrderedTransactionHeaders, OutputNote}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // BLOCK BODY // ================================================================================================ diff --git a/crates/miden-protocol/src/block/block_inputs.rs b/crates/miden-protocol/src/block/block_inputs.rs index e67f4e0bba..5e531f9e82 100644 --- a/crates/miden-protocol/src/block/block_inputs.rs +++ b/crates/miden-protocol/src/block/block_inputs.rs @@ -6,7 +6,13 @@ use crate::block::account_tree::AccountWitness; use crate::block::nullifier_tree::NullifierWitness; use crate::note::{NoteId, NoteInclusionProof, Nullifier}; use crate::transaction::PartialBlockchain; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // BLOCK INPUTS // ================================================================================================ diff --git a/crates/miden-protocol/src/block/block_number.rs b/crates/miden-protocol/src/block/block_number.rs index 9660084388..aec6613a48 100644 --- a/crates/miden-protocol/src/block/block_number.rs +++ b/crates/miden-protocol/src/block/block_number.rs @@ -99,8 +99,8 @@ impl Deserializable for BlockNumber { } impl From for Felt { - fn from(value: BlockNumber) -> Self { - Felt::from(value.as_u32()) + fn from(block_num: BlockNumber) -> Self { + Felt::from(block_num.as_u32()) } } diff --git a/crates/miden-protocol/src/block/block_proof.rs b/crates/miden-protocol/src/block/block_proof.rs index 710a77cb03..999b9ccd3a 100644 --- a/crates/miden-protocol/src/block/block_proof.rs +++ b/crates/miden-protocol/src/block/block_proof.rs @@ -1,4 +1,10 @@ -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + 
ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// Represents a proof of a block in the chain. /// diff --git a/crates/miden-protocol/src/block/blockchain.rs b/crates/miden-protocol/src/block/blockchain.rs index a70159fbd0..17c96bbad9 100644 --- a/crates/miden-protocol/src/block/blockchain.rs +++ b/crates/miden-protocol/src/block/blockchain.rs @@ -1,11 +1,16 @@ use alloc::collections::BTreeSet; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; use miden_crypto::merkle::mmr::{Forest, Mmr, MmrError, MmrPeaks, MmrProof, PartialMmr}; -use miden_processor::DeserializationError; use crate::Word; use crate::block::BlockNumber; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// The [Merkle Mountain Range](Mmr) defining the Miden blockchain. /// @@ -140,13 +145,13 @@ impl Blockchain { let mut partial_mmr = PartialMmr::from_peaks(peaks); for block_num in blocks.iter() { let leaf = self.mmr.get(block_num.as_usize())?; - let path = self.open_at(*block_num, checkpoint)?.merkle_path; + let proof = self.open_at(*block_num, checkpoint)?; // SAFETY: We should be able to fill the partial MMR with data from the partial // blockchain without errors, otherwise it indicates the blockchain is // invalid. 
partial_mmr - .track(block_num.as_usize(), leaf, &path) + .track(block_num.as_usize(), leaf, proof.merkle_path()) .expect("filling partial mmr with data from mmr should succeed"); } diff --git a/crates/miden-protocol/src/block/header.rs b/crates/miden-protocol/src/block/header.rs index 2ba311455e..fc13578258 100644 --- a/crates/miden-protocol/src/block/header.rs +++ b/crates/miden-protocol/src/block/header.rs @@ -241,10 +241,10 @@ impl BlockHeader { elements.extend_from_slice(tx_commitment.as_elements()); elements.extend_from_slice(tx_kernel_commitment.as_elements()); elements.extend(validator_key.to_commitment()); - elements.extend([block_num.into(), version.into(), timestamp.into(), ZERO]); + elements.extend([block_num.into(), Felt::from(version), Felt::from(timestamp), ZERO]); elements.extend([ ZERO, - fee_parameters.verification_base_fee().into(), + Felt::from(fee_parameters.verification_base_fee()), fee_parameters.native_asset_id().suffix(), fee_parameters.native_asset_id().prefix().as_felt(), ]); @@ -400,7 +400,7 @@ impl Deserializable for FeeParameters { mod tests { use assert_matches::assert_matches; use miden_core::Word; - use winter_rand_utils::rand_value; + use miden_crypto::rand::test_utils::rand_value; use super::*; use crate::testing::account_id::ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET; diff --git a/crates/miden-protocol/src/block/note_tree.rs b/crates/miden-protocol/src/block/note_tree.rs index 497aab12ba..81665b238a 100644 --- a/crates/miden-protocol/src/block/note_tree.rs +++ b/crates/miden-protocol/src/block/note_tree.rs @@ -1,4 +1,5 @@ use alloc::string::ToString; +use alloc::vec::Vec; use miden_crypto::merkle::SparseMerklePath; @@ -6,7 +7,13 @@ use crate::batch::BatchNoteTree; use crate::crypto::merkle::MerkleError; use crate::crypto::merkle::smt::{LeafIndex, SimpleSmt}; use crate::note::{NoteId, NoteMetadata, compute_note_commitment}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use 
crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{ BLOCK_NOTE_TREE_DEPTH, MAX_BATCHES_PER_BLOCK, @@ -148,7 +155,7 @@ impl BlockNoteIndex { ); self.leaf_index() - .value() + .position() .try_into() .expect("Unreachable: Input values must be valid at this point") } @@ -167,7 +174,7 @@ impl Serializable for BlockNoteTree { impl Deserializable for BlockNoteTree { fn read_from(source: &mut R) -> Result { let count = source.read_u32()?; - let leaves = source.read_many(count as usize)?; + let leaves = source.read_many_iter(count as usize)?.collect::, _>>()?; SimpleSmt::with_leaves(leaves) .map(Self) diff --git a/crates/miden-protocol/src/block/nullifier_tree/backend.rs b/crates/miden-protocol/src/block/nullifier_tree/backend.rs index 603258ea0a..90f0955046 100644 --- a/crates/miden-protocol/src/block/nullifier_tree/backend.rs +++ b/crates/miden-protocol/src/block/nullifier_tree/backend.rs @@ -114,12 +114,7 @@ where type Error = MerkleError; fn num_entries(&self) -> usize { - // SAFETY: We panic on storage errors here as they represent unrecoverable I/O failures. - // This maintains API compatibility with the non-fallible Smt::num_entries(). - // See issue #2010 for future improvements to error handling. 
LargeSmt::num_entries(self) - .map_err(large_smt_error_to_merkle_error) - .expect("Storage I/O error accessing num_entries") } fn entries(&self) -> Box + '_> { @@ -230,6 +225,13 @@ pub(super) fn large_smt_error_to_merkle_error(err: LargeSmtError) -> MerkleError LargeSmtError::Storage(storage_err) => { panic!("Storage error encountered: {:?}", storage_err) }, + LargeSmtError::StorageNotEmpty => { + panic!("StorageNotEmpty error encountered: {:?}", err) + }, LargeSmtError::Merkle(merkle_err) => merkle_err, + LargeSmtError::RootMismatch { expected, actual } => MerkleError::ConflictingRoots { + expected_root: expected, + actual_root: actual, + }, } } diff --git a/crates/miden-protocol/src/block/nullifier_tree/mod.rs b/crates/miden-protocol/src/block/nullifier_tree/mod.rs index 18332812ea..b85a4aebd7 100644 --- a/crates/miden-protocol/src/block/nullifier_tree/mod.rs +++ b/crates/miden-protocol/src/block/nullifier_tree/mod.rs @@ -6,8 +6,14 @@ use crate::crypto::merkle::MerkleError; use crate::crypto::merkle::smt::{MutationSet, SMT_DEPTH, Smt}; use crate::errors::NullifierTreeError; use crate::note::Nullifier; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; -use crate::{Felt, FieldElement, Word}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Word}; mod backend; pub use backend::NullifierTreeBackend; @@ -273,7 +279,7 @@ impl NullifierBlock { /// - The 0th element in the word is not a valid [BlockNumber]. /// - Any of the remaining elements is non-zero. 
pub fn new(word: Word) -> Result { - let block_num = u32::try_from(word[0].as_int()) + let block_num = u32::try_from(word[0].as_canonical_u64()) .map(BlockNumber::from) .map_err(|_| NullifierTreeError::InvalidNullifierBlockNumber(word))?; diff --git a/crates/miden-protocol/src/block/nullifier_tree/partial.rs b/crates/miden-protocol/src/block/nullifier_tree/partial.rs index 0bcb90c80d..9897525196 100644 --- a/crates/miden-protocol/src/block/nullifier_tree/partial.rs +++ b/crates/miden-protocol/src/block/nullifier_tree/partial.rs @@ -111,7 +111,7 @@ impl PartialNullifierTree { mod tests { use assert_matches::assert_matches; use miden_crypto::merkle::smt::Smt; - use winter_rand_utils::rand_value; + use miden_crypto::rand::test_utils::rand_value; use super::*; use crate::block::nullifier_tree::NullifierTree; diff --git a/crates/miden-protocol/src/block/proven_block.rs b/crates/miden-protocol/src/block/proven_block.rs index 25a952e36c..68abc97d23 100644 --- a/crates/miden-protocol/src/block/proven_block.rs +++ b/crates/miden-protocol/src/block/proven_block.rs @@ -1,8 +1,37 @@ +use miden_core::Word; use miden_crypto::dsa::ecdsa_k256_keccak::Signature; use crate::MIN_PROOF_SECURITY_LEVEL; use crate::block::{BlockBody, BlockHeader, BlockProof}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; + +// PROVEN BLOCK ERROR +// ================================================================================================ + +#[derive(Debug, thiserror::Error)] +pub enum ProvenBlockError { + #[error( + "ECDSA signature verification failed based on the proven block's header commitment, validator public key and signature" + )] + InvalidSignature, + #[error( + "header tx commitment ({header_tx_commitment}) does not match body tx commitment ({body_tx_commitment})" + )] + TxCommitmentMismatch { + header_tx_commitment: 
Word, + body_tx_commitment: Word, + }, + #[error( + "proven block header note root ({header_root}) does not match the corresponding body's note root ({body_root})" + )] + NoteRootMismatch { header_root: Word, body_root: Word }, +} // PROVEN BLOCK // ================================================================================================ @@ -29,6 +58,44 @@ pub struct ProvenBlock { } impl ProvenBlock { + /// Returns a new [`ProvenBlock`] instantiated from the provided components. + /// + /// Validates that the provided components correspond to each other by verifying the signature, + /// and checking for matching transaction commitments and note roots. + /// + /// Involves non-trivial computation. Use [`Self::new_unchecked`] if the validation is not + /// necessary. + /// + /// Note: this does not fully validate the consistency of provided components. Specifically, + /// we cannot validate that: + /// - That applying the account updates in the block body to the account tree represented by the + /// root from the previous block header would actually result in the account root in the + /// provided header. + /// - That inserting the created nullifiers in the block body to the nullifier tree represented + /// by the root from the previous block header would actually result in the nullifier root in + /// the provided header. + /// + /// # Errors + /// Returns an error if: + /// - If the validator signature does not verify against the block header commitment and the + /// validator key. + /// - If the transaction commitment in the block header is inconsistent with the transactions + /// included in the block body. + /// - If the note root in the block header is inconsistent with the notes included in the block + /// body. 
+ pub fn new( + header: BlockHeader, + body: BlockBody, + signature: Signature, + proof: BlockProof, + ) -> Result { + let proven_block = Self { header, signature, body, proof }; + + proven_block.validate()?; + + Ok(proven_block) + } + /// Returns a new [`ProvenBlock`] instantiated from the provided components. /// /// # Warning @@ -44,6 +111,42 @@ impl ProvenBlock { Self { header, signature, body, proof } } + /// Validates that the components of the proven block correspond to each other by verifying the + /// signature, and checking for matching transaction commitments and note roots. + /// + /// Validation involves non-trivial computation, and depending on the size of the block may + /// take non-negligible amount of time. + /// + /// Note: this does not fully validate the consistency of internal components. Specifically, + /// we cannot validate that: + /// - That applying the account updates in the block body to the account tree represented by the + /// root from the previous block header would actually result in the account root in the + /// provided header. + /// - That inserting the created nullifiers in the block body to the nullifier tree represented + /// by the root from the previous block header would actually result in the nullifier root in + /// the provided header. + /// + /// # Errors + /// Returns an error if: + /// - If the validator signature does not verify against the block header commitment and the + /// validator key. + /// - If the transaction commitment in the block header is inconsistent with the transactions + /// included in the block body. + /// - If the note root in the block header is inconsistent with the notes included in the block + /// body. + pub fn validate(&self) -> Result<(), ProvenBlockError> { + // Verify signature. + self.validate_signature()?; + + // Validate that header / body transaction commitments match. + self.validate_tx_commitment()?; + + // Validate that header / body note roots match. 
+ self.validate_note_root()?; + + Ok(()) + } + /// Returns the proof security level of the block. pub fn proof_security_level(&self) -> u32 { MIN_PROOF_SECURITY_LEVEL @@ -73,6 +176,45 @@ impl ProvenBlock { pub fn into_parts(self) -> (BlockHeader, BlockBody, Signature, BlockProof) { (self.header, self.body, self.signature, self.proof) } + + // HELPER METHODS + // -------------------------------------------------------------------------------------------- + + /// Performs ECDSA signature verification against the header commitment and validator key. + fn validate_signature(&self) -> Result<(), ProvenBlockError> { + if !self.signature.verify(self.header.commitment(), self.header.validator_key()) { + Err(ProvenBlockError::InvalidSignature) + } else { + Ok(()) + } + } + + /// Validates that the transaction commitments between the header and body match for this proven + /// block. + /// + /// Involves non-trivial computation of the body's transaction commitment. + fn validate_tx_commitment(&self) -> Result<(), ProvenBlockError> { + let header_tx_commitment = self.header.tx_commitment(); + let body_tx_commitment = self.body.transactions().commitment(); + if header_tx_commitment != body_tx_commitment { + Err(ProvenBlockError::TxCommitmentMismatch { header_tx_commitment, body_tx_commitment }) + } else { + Ok(()) + } + } + + /// Validates that the header's note tree root matches that of the body. + /// + /// Involves non-trivial computation of the body's note tree. 
+ fn validate_note_root(&self) -> Result<(), ProvenBlockError> { + let header_root = self.header.note_root(); + let body_root = self.body.compute_block_note_tree().root(); + if header_root != body_root { + Err(ProvenBlockError::NoteRootMismatch { header_root, body_root }) + } else { + Ok(()) + } + } } // SERIALIZATION diff --git a/crates/miden-protocol/src/block/signed_block.rs b/crates/miden-protocol/src/block/signed_block.rs index e755af3267..12aead9af5 100644 --- a/crates/miden-protocol/src/block/signed_block.rs +++ b/crates/miden-protocol/src/block/signed_block.rs @@ -2,7 +2,13 @@ use miden_core::Word; use miden_crypto::dsa::ecdsa_k256_keccak::Signature; use crate::block::{BlockBody, BlockHeader, BlockNumber}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // SIGNED BLOCK ERROR // ================================================================================================ diff --git a/crates/miden-protocol/src/constants.rs b/crates/miden-protocol/src/constants.rs index 964025064e..c10d263a2d 100644 --- a/crates/miden-protocol/src/constants.rs +++ b/crates/miden-protocol/src/constants.rs @@ -6,6 +6,9 @@ pub const ACCOUNT_TREE_DEPTH: u8 = 64; /// The maximum allowed size of an account update is 256 KiB. pub const ACCOUNT_UPDATE_MAX_SIZE: u32 = 2u32.pow(18); +/// The maximum allowed size of a serialized note in bytes (256 KiB). +pub const NOTE_MAX_SIZE: u32 = 2u32.pow(18); + /// The maximum number of assets that can be stored in a single note. 
pub const MAX_ASSETS_PER_NOTE: usize = 255; diff --git a/crates/miden-protocol/src/errors/mod.rs b/crates/miden-protocol/src/errors/mod.rs index 01b18602f2..8be84c5d0a 100644 --- a/crates/miden-protocol/src/errors/mod.rs +++ b/crates/miden-protocol/src/errors/mod.rs @@ -5,16 +5,15 @@ use core::error::Error; use miden_assembly::Report; use miden_assembly::diagnostics::reporting::PrintDiagnostic; +use miden_core::Felt; use miden_core::mast::MastForestError; -use miden_core::{EventId, Felt}; use miden_crypto::merkle::mmr::MmrError; use miden_crypto::merkle::smt::{SmtLeafError, SmtProofError}; use miden_crypto::utils::HexParseError; -use miden_processor::DeserializationError; use thiserror::Error; use super::account::AccountId; -use super::asset::{FungibleAsset, NonFungibleAsset, TokenSymbol}; +use super::asset::{AssetVaultKey, FungibleAsset, NonFungibleAsset, TokenSymbol}; use super::crypto::merkle::MerkleError; use super::note::NoteId; use super::{MAX_BATCHES_PER_BLOCK, MAX_OUTPUT_NOTES_PER_BATCH, Word}; @@ -24,18 +23,26 @@ use crate::account::{ AccountIdPrefix, AccountStorage, AccountType, + StorageMapKey, StorageSlotId, - // StorageValueName, - // StorageValueNameError, - // TemplateTypeError, StorageSlotName, }; use crate::address::AddressType; -use crate::asset::AssetVaultKey; +use crate::asset::AssetId; use crate::batch::BatchId; use crate::block::BlockNumber; -use crate::note::{NoteAssets, NoteAttachmentArray, NoteTag, NoteType, Nullifier}; -use crate::transaction::{TransactionEventId, TransactionId}; +use crate::note::{ + NoteAssets, + NoteAttachmentArray, + NoteAttachmentKind, + NoteAttachmentScheme, + NoteTag, + NoteType, + Nullifier, +}; +use crate::transaction::TransactionId; +use crate::utils::serde::DeserializationError; +use crate::vm::EventId; use crate::{ ACCOUNT_UPDATE_MAX_SIZE, MAX_ACCOUNTS_PER_BATCH, @@ -43,6 +50,7 @@ use crate::{ MAX_INPUT_NOTES_PER_TX, MAX_NOTE_STORAGE_ITEMS, MAX_OUTPUT_NOTES_PER_TX, + NOTE_MAX_SIZE, }; #[cfg(any(feature = 
"testing", test))] @@ -52,13 +60,15 @@ pub use masm_error::MasmError; /// The errors from the MASM code of the transaction kernel. #[cfg(any(feature = "testing", test))] -#[rustfmt::skip] -pub mod tx_kernel; +pub mod tx_kernel { + include!(concat!(env!("OUT_DIR"), "/tx_kernel_errors.rs")); +} /// The errors from the MASM code of the Miden protocol library. #[cfg(any(feature = "testing", test))] -#[rustfmt::skip] -pub mod protocol; +pub mod protocol { + include!(concat!(env!("OUT_DIR"), "/protocol_errors.rs")); +} // ACCOUNT COMPONENT TEMPLATE ERROR // ================================================================================================ @@ -272,6 +282,8 @@ pub enum AccountTreeError { ApplyMutations(#[source] MerkleError), #[error("failed to compute account tree mutations")] ComputeMutations(#[source] MerkleError), + #[error("provided smt contains an invalid account ID in key {key}")] + InvalidAccountIdKey { key: Word, source: AccountIdError }, #[error("smt leaf's index is not a valid account ID prefix")] InvalidAccountIdPrefix(#[source] AccountIdError), #[error("account witness merkle path depth {0} does not match AccountTree::DEPTH")] @@ -401,9 +413,13 @@ pub enum AccountDeltaError { #[derive(Debug, Error)] pub enum StorageMapError { #[error("map entries contain key {key} twice with values {value0} and {value1}")] - DuplicateKey { key: Word, value0: Word, value1: Word }, - #[error("map key {raw_key} is not present in provided SMT proof")] - MissingKey { raw_key: Word }, + DuplicateKey { + key: StorageMapKey, + value0: Word, + value1: Word, + }, + #[error("map key {key} is not present in provided SMT proof")] + MissingKey { key: StorageMapKey }, } // BATCH ACCOUNT UPDATE ERROR @@ -439,33 +455,41 @@ pub enum AssetError { FungibleAssetAmountTooBig(u64), #[error("subtracting {subtrahend} from fungible asset amount {minuend} would underflow")] FungibleAssetAmountNotSufficient { minuend: u64, subtrahend: u64 }, - #[error("fungible asset word {0} does not 
contain expected ZERO at word index 1")] - FungibleAssetExpectedZero(Word), #[error( - "cannot add fungible asset with issuer {other_issuer} to fungible asset with issuer {original_issuer}" + "cannot combine fungible assets with different vault keys: {original_key} and {other_key}" )] - FungibleAssetInconsistentFaucetIds { - original_issuer: AccountId, - other_issuer: AccountId, + FungibleAssetInconsistentVaultKeys { + original_key: AssetVaultKey, + other_key: AssetVaultKey, }, #[error("faucet account ID in asset is invalid")] InvalidFaucetAccountId(#[source] Box), - #[error("faucet account ID in asset has a non-faucet prefix: {}", .0)] - InvalidFaucetAccountIdPrefix(AccountIdPrefix), #[error( "faucet id {0} of type {id_type} must be of type {expected_ty} for fungible assets", id_type = .0.account_type(), expected_ty = AccountType::FungibleFaucet )] FungibleFaucetIdTypeMismatch(AccountId), + #[error( + "asset ID prefix and suffix in a non-fungible asset's vault key must match indices 0 and 1 in the value, but asset ID was {asset_id} and value was {value}" + )] + NonFungibleAssetIdMustMatchValue { asset_id: AssetId, value: Word }, + #[error("asset ID prefix and suffix in a fungible asset's vault key must be zero but was {0}")] + FungibleAssetIdMustBeZero(AssetId), + #[error( + "the three most significant elements in a fungible asset's value must be zero but provided value was {0}" + )] + FungibleAssetValueMostSignificantElementsMustBeZero(Word), #[error( "faucet id {0} of type {id_type} must be of type {expected_ty} for non fungible assets", id_type = .0.account_type(), expected_ty = AccountType::NonFungibleFaucet )] - NonFungibleFaucetIdTypeMismatch(AccountIdPrefix), - #[error("asset vault key {actual} does not match expected asset vault key {expected}")] - AssetVaultKeyMismatch { actual: Word, expected: Word }, + NonFungibleFaucetIdTypeMismatch(AccountId), + #[error("smt proof in asset witness contains invalid key or value")] + AssetWitnessInvalid(#[source] Box), 
+ #[error("invalid native asset callbacks encoding: {0}")] + InvalidAssetCallbackFlag(u8), } // TOKEN SYMBOL ERROR @@ -475,6 +499,11 @@ pub enum AssetError { pub enum TokenSymbolError { #[error("token symbol value {0} cannot exceed {max}", max = TokenSymbol::MAX_ENCODED_VALUE)] ValueTooLarge(u64), + #[error( + "token symbol value {0} cannot be less than {min}", + min = TokenSymbol::MIN_ENCODED_VALUE + )] + ValueTooSmall(u64), #[error("token symbol should have length between 1 and 12 characters, but {0} was provided")] InvalidLength(usize), #[error("token symbol contains a character that is not uppercase ASCII")] @@ -513,8 +542,6 @@ pub enum AssetVaultError { pub enum PartialAssetVaultError { #[error("provided SMT entry {entry} is not a valid asset")] InvalidAssetInSmt { entry: Word, source: AssetError }, - #[error("expected asset vault key to be {expected} but it was {actual}")] - AssetVaultKeyMismatch { expected: AssetVaultKey, actual: Word }, #[error("failed to add asset proof")] FailedToAddProof(#[source] MerkleError), #[error("asset is not tracked in the partial vault")] @@ -584,6 +611,20 @@ pub enum NoteError { UnknownNoteAttachmentKind(u8), #[error("note attachment of kind None must have attachment scheme None")] AttachmentKindNoneMustHaveAttachmentSchemeNone, + #[error( + "note attachment kind mismatch: header has {header_kind:?} but attachment has {attachment_kind:?}" + )] + AttachmentKindMismatch { + header_kind: NoteAttachmentKind, + attachment_kind: NoteAttachmentKind, + }, + #[error( + "note attachment scheme mismatch: header has {header_scheme:?} but attachment has {attachment_scheme:?}" + )] + AttachmentSchemeMismatch { + header_scheme: NoteAttachmentScheme, + attachment_scheme: NoteAttachmentScheme, + }, #[error("{error_msg}")] Other { error_msg: Box, @@ -753,17 +794,31 @@ pub enum TransactionOutputError { AccountUpdateCommitment(Box), } +// OUTPUT NOTE ERROR +// 
================================================================================================ + +/// Errors that can occur when creating a +/// [`PublicOutputNote`](crate::transaction::PublicOutputNote) or +/// [`PrivateNoteHeader`](crate::transaction::PrivateNoteHeader). +#[derive(Debug, Error)] +pub enum OutputNoteError { + #[error("note with id {0} is private but expected a public note")] + NoteIsPrivate(NoteId), + #[error("note with id {0} is public but expected a private note")] + NoteIsPublic(NoteId), + #[error( + "public note with id {note_id} has size {note_size} bytes which exceeds maximum note size of {NOTE_MAX_SIZE}" + )] + NoteSizeLimitExceeded { note_id: NoteId, note_size: usize }, +} + // TRANSACTION EVENT PARSING ERROR // ================================================================================================ #[derive(Debug, Error)] pub enum TransactionEventError { #[error("event id {0} is not a valid transaction event")] - InvalidTransactionEvent(EventId, Option<&'static str>), - #[error("event id {0} is not a transaction kernel event")] - NotTransactionEvent(EventId, Option<&'static str>), - #[error("event id {0} can only be emitted from the root context")] - NotRootContext(TransactionEventId), + InvalidTransactionEvent(EventId), } // TRANSACTION TRACE PARSING ERROR @@ -1158,5 +1213,5 @@ pub enum NullifierTreeError { #[derive(Debug, Error)] pub enum AuthSchemeError { #[error("auth scheme identifier `{0}` is not valid")] - InvalidAuthSchemeIdentifier(u8), + InvalidAuthSchemeIdentifier(String), } diff --git a/crates/miden-protocol/src/errors/protocol.rs b/crates/miden-protocol/src/errors/protocol.rs deleted file mode 100644 index dfbdfbf56a..0000000000 --- a/crates/miden-protocol/src/errors/protocol.rs +++ /dev/null @@ -1,42 +0,0 @@ -use crate::errors::MasmError; - -// This file is generated by build.rs, do not modify manually. -// It is generated by extracting errors from the MASM files in the `./asm` directory. 
-// -// To add a new error, define a constant in MASM of the pattern `const ERR__...`. -// Try to fit the error into a pre-existing category if possible (e.g. Account, Note, ...). - -// PROTOCOL LIB ERRORS -// ================================================================================================ - -/// Error Message: "the account ID must have storage mode public if the network flag is set" -pub const ERR_ACCOUNT_ID_NON_PUBLIC_NETWORK_ACCOUNT: MasmError = MasmError::from_static_str("the account ID must have storage mode public if the network flag is set"); -/// Error Message: "least significant byte of the account ID suffix must be zero" -pub const ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO: MasmError = MasmError::from_static_str("least significant byte of the account ID suffix must be zero"); -/// Error Message: "most significant bit of the account ID suffix must be zero" -pub const ERR_ACCOUNT_ID_SUFFIX_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO: MasmError = MasmError::from_static_str("most significant bit of the account ID suffix must be zero"); -/// Error Message: "unknown account storage mode in account ID" -pub const ERR_ACCOUNT_ID_UNKNOWN_STORAGE_MODE: MasmError = MasmError::from_static_str("unknown account storage mode in account ID"); -/// Error Message: "unknown version in account ID" -pub const ERR_ACCOUNT_ID_UNKNOWN_VERSION: MasmError = MasmError::from_static_str("unknown version in account ID"); - -/// Error Message: "fungible asset build operation called with amount that exceeds the maximum allowed asset amount" -pub const ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_ALLOWED_AMOUNT: MasmError = MasmError::from_static_str("fungible asset build operation called with amount that exceeds the maximum allowed asset amount"); -/// Error Message: "failed to build the fungible asset because the provided faucet id is not from a fungible faucet" -pub const ERR_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID: MasmError = MasmError::from_static_str("failed to 
build the fungible asset because the provided faucet id is not from a fungible faucet"); - -/// Error Message: "failed to build the non-fungible asset because the provided faucet id is not from a non-fungible faucet" -pub const ERR_NON_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID: MasmError = MasmError::from_static_str("failed to build the non-fungible asset because the provided faucet id is not from a non-fungible faucet"); - -/// Error Message: "note data does not match the commitment" -pub const ERR_NOTE_DATA_DOES_NOT_MATCH_COMMITMENT: MasmError = MasmError::from_static_str("note data does not match the commitment"); -/// Error Message: "the specified number of note storage items does not match the actual number" -pub const ERR_NOTE_INVALID_NUMBER_OF_STORAGE_ITEMS: MasmError = MasmError::from_static_str("the specified number of note storage items does not match the actual number"); - -/// Error Message: "number of note storage exceeded the maximum limit of 1024" -pub const ERR_PROLOGUE_NOTE_NUM_STORAGE_ITEMS_EXCEEDED_LIMIT: MasmError = MasmError::from_static_str("number of note storage exceeded the maximum limit of 1024"); - -/// Error Message: "get_balance can only be called on a fungible asset" -pub const ERR_VAULT_GET_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET: MasmError = MasmError::from_static_str("get_balance can only be called on a fungible asset"); -/// Error Message: "the has_non_fungible_asset procedure can only be called on a non-fungible faucet" -pub const ERR_VAULT_HAS_NON_FUNGIBLE_ASSET_PROC_CAN_BE_CALLED_ONLY_WITH_NON_FUNGIBLE_ASSET: MasmError = MasmError::from_static_str("the has_non_fungible_asset procedure can only be called on a non-fungible faucet"); diff --git a/crates/miden-protocol/src/errors/tx_kernel.rs b/crates/miden-protocol/src/errors/tx_kernel.rs deleted file mode 100644 index c311ad78c0..0000000000 --- a/crates/miden-protocol/src/errors/tx_kernel.rs +++ /dev/null @@ -1,218 +0,0 @@ -use crate::errors::MasmError; - -// This file is 
generated by build.rs, do not modify manually. -// It is generated by extracting errors from the MASM files in the `./asm` directory. -// -// To add a new error, define a constant in MASM of the pattern `const ERR__...`. -// Try to fit the error into a pre-existing category if possible (e.g. Account, Note, ...). - -// TX KERNEL ERRORS -// ================================================================================================ - -/// Error Message: "computed account code commitment does not match recorded account code commitment" -pub const ERR_ACCOUNT_CODE_COMMITMENT_MISMATCH: MasmError = MasmError::from_static_str("computed account code commitment does not match recorded account code commitment"); -/// Error Message: "account code must be updatable for it to be possible to set new code" -pub const ERR_ACCOUNT_CODE_IS_NOT_UPDATABLE: MasmError = MasmError::from_static_str("account code must be updatable for it to be possible to set new code"); -/// Error Message: "nonce must be incremented if account vault or account storage changed" -pub const ERR_ACCOUNT_DELTA_NONCE_MUST_BE_INCREMENTED_IF_VAULT_OR_STORAGE_CHANGED: MasmError = MasmError::from_static_str("nonce must be incremented if account vault or account storage changed"); -/// Error Message: "the account ID must have storage mode public if the network flag is set" -pub const ERR_ACCOUNT_ID_NON_PUBLIC_NETWORK_ACCOUNT: MasmError = MasmError::from_static_str("the account ID must have storage mode public if the network flag is set"); -/// Error Message: "least significant byte of the account ID suffix must be zero" -pub const ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO: MasmError = MasmError::from_static_str("least significant byte of the account ID suffix must be zero"); -/// Error Message: "most significant bit of the account ID suffix must be zero" -pub const ERR_ACCOUNT_ID_SUFFIX_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO: MasmError = MasmError::from_static_str("most significant bit of the account 
ID suffix must be zero"); -/// Error Message: "unknown account storage mode in account ID" -pub const ERR_ACCOUNT_ID_UNKNOWN_STORAGE_MODE: MasmError = MasmError::from_static_str("unknown account storage mode in account ID"); -/// Error Message: "unknown version in account ID" -pub const ERR_ACCOUNT_ID_UNKNOWN_VERSION: MasmError = MasmError::from_static_str("unknown version in account ID"); -/// Error Message: "the active account is not native" -pub const ERR_ACCOUNT_IS_NOT_NATIVE: MasmError = MasmError::from_static_str("the active account is not native"); -/// Error Message: "account nonce is already at its maximum possible value" -pub const ERR_ACCOUNT_NONCE_AT_MAX: MasmError = MasmError::from_static_str("account nonce is already at its maximum possible value"); -/// Error Message: "account nonce can only be incremented once" -pub const ERR_ACCOUNT_NONCE_CAN_ONLY_BE_INCREMENTED_ONCE: MasmError = MasmError::from_static_str("account nonce can only be incremented once"); -/// Error Message: "number of account procedures must be at least 2" -pub const ERR_ACCOUNT_NOT_ENOUGH_PROCEDURES: MasmError = MasmError::from_static_str("number of account procedures must be at least 2"); -/// Error Message: "provided procedure index is out of bounds" -pub const ERR_ACCOUNT_PROC_INDEX_OUT_OF_BOUNDS: MasmError = MasmError::from_static_str("provided procedure index is out of bounds"); -/// Error Message: "account procedure is not the authentication procedure; some procedures (e.g. `incr_nonce`) can be called only from the authentication procedure" -pub const ERR_ACCOUNT_PROC_NOT_AUTH_PROC: MasmError = MasmError::from_static_str("account procedure is not the authentication procedure; some procedures (e.g. 
`incr_nonce`) can be called only from the authentication procedure"); -/// Error Message: "procedure is not part of the account code" -pub const ERR_ACCOUNT_PROC_NOT_PART_OF_ACCOUNT_CODE: MasmError = MasmError::from_static_str("procedure is not part of the account code"); -/// Error Message: "failed to read an account map item from a non-map storage slot" -pub const ERR_ACCOUNT_READING_MAP_VALUE_FROM_NON_MAP_SLOT: MasmError = MasmError::from_static_str("failed to read an account map item from a non-map storage slot"); -/// Error Message: "ID of the new account does not match the ID computed from the seed and commitments" -pub const ERR_ACCOUNT_SEED_AND_COMMITMENT_DIGEST_MISMATCH: MasmError = MasmError::from_static_str("ID of the new account does not match the ID computed from the seed and commitments"); -/// Error Message: "failed to write an account map item to a non-map storage slot" -pub const ERR_ACCOUNT_SETTING_MAP_ITEM_ON_NON_MAP_SLOT: MasmError = MasmError::from_static_str("failed to write an account map item to a non-map storage slot"); -/// Error Message: "failed to write an account value item to a non-value storage slot" -pub const ERR_ACCOUNT_SETTING_VALUE_ITEM_ON_NON_VALUE_SLOT: MasmError = MasmError::from_static_str("failed to write an account value item to a non-value storage slot"); -/// Error Message: "depth of the nested FPI calls exceeded 64" -pub const ERR_ACCOUNT_STACK_OVERFLOW: MasmError = MasmError::from_static_str("depth of the nested FPI calls exceeded 64"); -/// Error Message: "failed to end foreign context because the active account is the native account" -pub const ERR_ACCOUNT_STACK_UNDERFLOW: MasmError = MasmError::from_static_str("failed to end foreign context because the active account is the native account"); -/// Error Message: "computed account storage commitment does not match recorded account storage commitment" -pub const ERR_ACCOUNT_STORAGE_COMMITMENT_MISMATCH: MasmError = MasmError::from_static_str("computed account storage 
commitment does not match recorded account storage commitment"); -/// Error Message: "storage map entries provided as advice inputs do not have the same storage map root as the root of the map the new account commits to" -pub const ERR_ACCOUNT_STORAGE_MAP_ENTRIES_DO_NOT_MATCH_MAP_ROOT: MasmError = MasmError::from_static_str("storage map entries provided as advice inputs do not have the same storage map root as the root of the map the new account commits to"); -/// Error Message: "slot IDs must be unique and sorted in ascending order" -pub const ERR_ACCOUNT_STORAGE_SLOTS_MUST_BE_SORTED_AND_UNIQUE: MasmError = MasmError::from_static_str("slot IDs must be unique and sorted in ascending order"); -/// Error Message: "number of account procedures exceeds the maximum limit of 256" -pub const ERR_ACCOUNT_TOO_MANY_PROCEDURES: MasmError = MasmError::from_static_str("number of account procedures exceeds the maximum limit of 256"); -/// Error Message: "number of account storage slots exceeds the maximum limit of 255" -pub const ERR_ACCOUNT_TOO_MANY_STORAGE_SLOTS: MasmError = MasmError::from_static_str("number of account storage slots exceeds the maximum limit of 255"); -/// Error Message: "storage slot with the provided name does not exist" -pub const ERR_ACCOUNT_UNKNOWN_STORAGE_SLOT_NAME: MasmError = MasmError::from_static_str("storage slot with the provided name does not exist"); - -/// Error Message: "auth procedure has been called from outside the epilogue" -pub const ERR_EPILOGUE_AUTH_PROCEDURE_CALLED_FROM_WRONG_CONTEXT: MasmError = MasmError::from_static_str("auth procedure has been called from outside the epilogue"); -/// Error Message: "executed transaction neither changed the account state, nor consumed any notes" -pub const ERR_EPILOGUE_EXECUTED_TRANSACTION_IS_EMPTY: MasmError = MasmError::from_static_str("executed transaction neither changed the account state, nor consumed any notes"); -/// Error Message: "nonce cannot be 0 after an account-creating transaction" 
-pub const ERR_EPILOGUE_NONCE_CANNOT_BE_0: MasmError = MasmError::from_static_str("nonce cannot be 0 after an account-creating transaction"); -/// Error Message: "total number of assets in the account and all involved notes must stay the same" -pub const ERR_EPILOGUE_TOTAL_NUMBER_OF_ASSETS_MUST_STAY_THE_SAME: MasmError = MasmError::from_static_str("total number of assets in the account and all involved notes must stay the same"); - -/// Error Message: "the burn_non_fungible_asset procedure can only be called on a non-fungible faucet" -pub const ERR_FAUCET_BURN_NON_FUNGIBLE_ASSET_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET: MasmError = MasmError::from_static_str("the burn_non_fungible_asset procedure can only be called on a non-fungible faucet"); - -/// Error Message: "creation of a foreign context against the native account is forbidden" -pub const ERR_FOREIGN_ACCOUNT_CONTEXT_AGAINST_NATIVE_ACCOUNT: MasmError = MasmError::from_static_str("creation of a foreign context against the native account is forbidden"); -/// Error Message: "ID of the provided foreign account equals zero indicating that tx_prepare_fpi was not called" -pub const ERR_FOREIGN_ACCOUNT_ID_IS_ZERO: MasmError = MasmError::from_static_str("ID of the provided foreign account equals zero indicating that tx_prepare_fpi was not called"); -/// Error Message: "commitment of the foreign account in the advice provider does not match the commitment in the account tree" -pub const ERR_FOREIGN_ACCOUNT_INVALID_COMMITMENT: MasmError = MasmError::from_static_str("commitment of the foreign account in the advice provider does not match the commitment in the account tree"); -/// Error Message: "maximum allowed number of foreign account to be loaded (64) was exceeded" -pub const ERR_FOREIGN_ACCOUNT_MAX_NUMBER_EXCEEDED: MasmError = MasmError::from_static_str("maximum allowed number of foreign account to be loaded (64) was exceeded"); -/// Error Message: "root of the provided foreign procedure equals zero indicating that 
tx_prepare_fpi was not called" -pub const ERR_FOREIGN_ACCOUNT_PROCEDURE_ROOT_IS_ZERO: MasmError = MasmError::from_static_str("root of the provided foreign procedure equals zero indicating that tx_prepare_fpi was not called"); - -/// Error Message: "the origin of the fungible asset is not this faucet" -pub const ERR_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN: MasmError = MasmError::from_static_str("the origin of the fungible asset is not this faucet"); -/// Error Message: "malformed fungible asset: `ASSET[1]` must be 0" -pub const ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ONE_MUST_BE_ZERO: MasmError = MasmError::from_static_str("malformed fungible asset: `ASSET[1]` must be 0"); -/// Error Message: "malformed fungible asset: `ASSET[2]` and `ASSET[3]` must be a valid fungible faucet id" -pub const ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_TWO_AND_THREE_MUST_BE_FUNGIBLE_FAUCET_ID: MasmError = MasmError::from_static_str("malformed fungible asset: `ASSET[2]` and `ASSET[3]` must be a valid fungible faucet id"); -/// Error Message: "malformed fungible asset: `ASSET[0]` exceeds the maximum allowed amount" -pub const ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ZERO_MUST_BE_WITHIN_LIMITS: MasmError = MasmError::from_static_str("malformed fungible asset: `ASSET[0]` exceeds the maximum allowed amount"); - -/// Error Message: "requested input note index should be less than the total number of input notes" -pub const ERR_INPUT_NOTE_INDEX_OUT_OF_BOUNDS: MasmError = MasmError::from_static_str("requested input note index should be less than the total number of input notes"); - -/// Error Message: "provided kernel procedure offset is out of bounds" -pub const ERR_KERNEL_PROCEDURE_OFFSET_OUT_OF_BOUNDS: MasmError = MasmError::from_static_str("provided kernel procedure offset is out of bounds"); - -/// Error Message: "map cannot be empty when proving absence after an entry" -pub const ERR_LINK_MAP_CANNOT_BE_EMPTY_ON_ABSENCE_AFTER_ENTRY: MasmError = MasmError::from_static_str("map cannot be empty when proving absence 
after an entry"); -/// Error Message: "host-provided entry ptr is not 'link map entry'-aligned" -pub const ERR_LINK_MAP_ENTRY_PTR_IS_NOT_ENTRY_ALIGNED: MasmError = MasmError::from_static_str("host-provided entry ptr is not 'link map entry'-aligned"); -/// Error Message: "host-provided entry ptr is outside the valid memory region" -pub const ERR_LINK_MAP_ENTRY_PTR_IS_OUTSIDE_VALID_MEMORY_REGION: MasmError = MasmError::from_static_str("host-provided entry ptr is outside the valid memory region"); -/// Error Message: "map ptr stored in host-provided entry does not match actual pointer of the map" -pub const ERR_LINK_MAP_MAP_PTR_IN_ENTRY_DOES_NOT_MATCH_EXPECTED_MAP_PTR: MasmError = MasmError::from_static_str("map ptr stored in host-provided entry does not match actual pointer of the map"); -/// Error Message: "number of link map entries exceeds maximum" -pub const ERR_LINK_MAP_MAX_ENTRIES_EXCEEDED: MasmError = MasmError::from_static_str("number of link map entries exceeds maximum"); -/// Error Message: "provided key does not match key in map entry" -pub const ERR_LINK_MAP_PROVIDED_KEY_NOT_EQUAL_TO_ENTRY_KEY: MasmError = MasmError::from_static_str("provided key does not match key in map entry"); -/// Error Message: "provided key is not greater than the entry key" -pub const ERR_LINK_MAP_PROVIDED_KEY_NOT_GREATER_THAN_ENTRY_KEY: MasmError = MasmError::from_static_str("provided key is not greater than the entry key"); -/// Error Message: "provided key is not less than the entry key" -pub const ERR_LINK_MAP_PROVIDED_KEY_NOT_LESS_THAN_ENTRY_KEY: MasmError = MasmError::from_static_str("provided key is not less than the entry key"); - -/// Error Message: "non-fungible asset that already exists in the note cannot be added again" -pub const ERR_NON_FUNGIBLE_ASSET_ALREADY_EXISTS: MasmError = MasmError::from_static_str("non-fungible asset that already exists in the note cannot be added again"); -/// Error Message: "the origin of the non-fungible asset is not this faucet" -pub 
const ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN: MasmError = MasmError::from_static_str("the origin of the non-fungible asset is not this faucet"); -/// Error Message: "malformed non-fungible asset: `ASSET[3]` is not a valid non-fungible faucet id" -pub const ERR_NON_FUNGIBLE_ASSET_FORMAT_ELEMENT_THREE_MUST_BE_FUNGIBLE_FAUCET_ID: MasmError = MasmError::from_static_str("malformed non-fungible asset: `ASSET[3]` is not a valid non-fungible faucet id"); -/// Error Message: "malformed non-fungible asset: the most significant bit must be 0" -pub const ERR_NON_FUNGIBLE_ASSET_FORMAT_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO: MasmError = MasmError::from_static_str("malformed non-fungible asset: the most significant bit must be 0"); - -/// Error Message: "failed to access note assets of active note because no note is currently being processed" -pub const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_ASSETS_WHILE_NO_NOTE_BEING_PROCESSED: MasmError = MasmError::from_static_str("failed to access note assets of active note because no note is currently being processed"); -/// Error Message: "failed to access note metadata of active note because no note is currently being processed" -pub const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_METADATA_WHILE_NO_NOTE_BEING_PROCESSED: MasmError = MasmError::from_static_str("failed to access note metadata of active note because no note is currently being processed"); -/// Error Message: "failed to access note recipient of active note because no note is currently being processed" -pub const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_RECIPIENT_WHILE_NO_NOTE_BEING_PROCESSED: MasmError = MasmError::from_static_str("failed to access note recipient of active note because no note is currently being processed"); -/// Error Message: "failed to access note script root of active note because no note is currently being processed" -pub const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_SCRIPT_ROOT_WHILE_NO_NOTE_BEING_PROCESSED: MasmError = MasmError::from_static_str("failed to access note script root of 
active note because no note is currently being processed"); -/// Error Message: "failed to access note serial number of active note because no note is currently being processed" -pub const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_SERIAL_NUMBER_WHILE_NO_NOTE_BEING_PROCESSED: MasmError = MasmError::from_static_str("failed to access note serial number of active note because no note is currently being processed"); -/// Error Message: "failed to access note storage of active note because no note is currently being processed" -pub const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_STORAGE_WHILE_NO_NOTE_BEING_PROCESSED: MasmError = MasmError::from_static_str("failed to access note storage of active note because no note is currently being processed"); -/// Error Message: "adding a fungible asset to a note cannot exceed the max_amount of 9223372036854775807" -pub const ERR_NOTE_FUNGIBLE_MAX_AMOUNT_EXCEEDED: MasmError = MasmError::from_static_str("adding a fungible asset to a note cannot exceed the max_amount of 9223372036854775807"); -/// Error Message: "failed to find note at the given index; index must be within [0, num_of_notes]" -pub const ERR_NOTE_INVALID_INDEX: MasmError = MasmError::from_static_str("failed to find note at the given index; index must be within [0, num_of_notes]"); -/// Error Message: "invalid note type" -pub const ERR_NOTE_INVALID_TYPE: MasmError = MasmError::from_static_str("invalid note type"); -/// Error Message: "number of assets in a note exceed 255" -pub const ERR_NOTE_NUM_OF_ASSETS_EXCEED_LIMIT: MasmError = MasmError::from_static_str("number of assets in a note exceed 255"); -/// Error Message: "the note's tag must fit into a u32 so the 32 most significant bits of the felt must be zero" -pub const ERR_NOTE_TAG_MUST_BE_U32: MasmError = MasmError::from_static_str("the note's tag must fit into a u32 so the 32 most significant bits of the felt must be zero"); - -/// Error Message: "attachment kind None requires ATTACHMENT to be set to an empty word" -pub const 
ERR_OUTPUT_NOTE_ATTACHMENT_KIND_NONE_MUST_BE_EMPTY_WORD: MasmError = MasmError::from_static_str("attachment kind None requires ATTACHMENT to be set to an empty word"); -/// Error Message: "attachment kind none must have attachment scheme none" -pub const ERR_OUTPUT_NOTE_ATTACHMENT_KIND_NONE_MUST_HAVE_ATTACHMENT_SCHEME_NONE: MasmError = MasmError::from_static_str("attachment kind none must have attachment scheme none"); -/// Error Message: "requested output note index should be less than the total number of created output notes" -pub const ERR_OUTPUT_NOTE_INDEX_OUT_OF_BOUNDS: MasmError = MasmError::from_static_str("requested output note index should be less than the total number of created output notes"); -/// Error Message: "attachment scheme and attachment kind must fit into u32s" -pub const ERR_OUTPUT_NOTE_INVALID_ATTACHMENT_SCHEMES: MasmError = MasmError::from_static_str("attachment scheme and attachment kind must fit into u32s"); -/// Error Message: "attachment kind variant must be between 0 and 2" -pub const ERR_OUTPUT_NOTE_UNKNOWN_ATTACHMENT_KIND: MasmError = MasmError::from_static_str("attachment kind variant must be between 0 and 2"); - -/// Error Message: "existing accounts must have a non-zero nonce" -pub const ERR_PROLOGUE_EXISTING_ACCOUNT_MUST_HAVE_NON_ZERO_NONCE: MasmError = MasmError::from_static_str("existing accounts must have a non-zero nonce"); -/// Error Message: "the provided global inputs do not match the block commitment" -pub const ERR_PROLOGUE_GLOBAL_INPUTS_PROVIDED_DO_NOT_MATCH_BLOCK_COMMITMENT: MasmError = MasmError::from_static_str("the provided global inputs do not match the block commitment"); -/// Error Message: "the provided global inputs do not match the block number commitment" -pub const ERR_PROLOGUE_GLOBAL_INPUTS_PROVIDED_DO_NOT_MATCH_BLOCK_NUMBER_COMMITMENT: MasmError = MasmError::from_static_str("the provided global inputs do not match the block number commitment"); -/// Error Message: "note commitment computed from the input 
note data does not match given note commitment" -pub const ERR_PROLOGUE_INPUT_NOTES_COMMITMENT_MISMATCH: MasmError = MasmError::from_static_str("note commitment computed from the input note data does not match given note commitment"); -/// Error Message: "sequential hash over kernel procedures does not match kernel commitment from block" -pub const ERR_PROLOGUE_KERNEL_PROCEDURE_COMMITMENT_MISMATCH: MasmError = MasmError::from_static_str("sequential hash over kernel procedures does not match kernel commitment from block"); -/// Error Message: "account IDs provided via global inputs and advice provider do not match" -pub const ERR_PROLOGUE_MISMATCH_OF_ACCOUNT_IDS_FROM_GLOBAL_INPUTS_AND_ADVICE_PROVIDER: MasmError = MasmError::from_static_str("account IDs provided via global inputs and advice provider do not match"); -/// Error Message: "reference block MMR and note's authentication MMR must match" -pub const ERR_PROLOGUE_MISMATCH_OF_REFERENCE_BLOCK_MMR_AND_NOTE_AUTHENTICATION_MMR: MasmError = MasmError::from_static_str("reference block MMR and note's authentication MMR must match"); -/// Error Message: "native asset account ID in reference block is not of type fungible faucet" -pub const ERR_PROLOGUE_NATIVE_ASSET_ID_IS_NOT_FUNGIBLE: MasmError = MasmError::from_static_str("native asset account ID in reference block is not of type fungible faucet"); -/// Error Message: "new account must have a zero nonce" -pub const ERR_PROLOGUE_NEW_ACCOUNT_NONCE_MUST_BE_ZERO: MasmError = MasmError::from_static_str("new account must have a zero nonce"); -/// Error Message: "new account must have an empty vault" -pub const ERR_PROLOGUE_NEW_ACCOUNT_VAULT_MUST_BE_EMPTY: MasmError = MasmError::from_static_str("new account must have an empty vault"); -/// Error Message: "failed to authenticate note inclusion in block" -pub const ERR_PROLOGUE_NOTE_AUTHENTICATION_FAILED: MasmError = MasmError::from_static_str("failed to authenticate note inclusion in block"); -/// Error Message: "number of 
input notes exceeds the kernel's maximum limit of 1024" -pub const ERR_PROLOGUE_NUMBER_OF_INPUT_NOTES_EXCEEDS_LIMIT: MasmError = MasmError::from_static_str("number of input notes exceeds the kernel's maximum limit of 1024"); -/// Error Message: "number of note assets exceeds the maximum limit of 256" -pub const ERR_PROLOGUE_NUMBER_OF_NOTE_ASSETS_EXCEEDS_LIMIT: MasmError = MasmError::from_static_str("number of note assets exceeds the maximum limit of 256"); -/// Error Message: "number of note storage items exceeded the maximum limit of 1024" -pub const ERR_PROLOGUE_NUMBER_OF_NOTE_STORAGE_ITEMS_EXCEEDED_LIMIT: MasmError = MasmError::from_static_str("number of note storage items exceeded the maximum limit of 1024"); -/// Error Message: "account data provided does not match the commitment recorded on-chain" -pub const ERR_PROLOGUE_PROVIDED_ACCOUNT_DATA_DOES_NOT_MATCH_ON_CHAIN_COMMITMENT: MasmError = MasmError::from_static_str("account data provided does not match the commitment recorded on-chain"); -/// Error Message: "provided info about assets of an input does not match its commitment" -pub const ERR_PROLOGUE_PROVIDED_INPUT_ASSETS_INFO_DOES_NOT_MATCH_ITS_COMMITMENT: MasmError = MasmError::from_static_str("provided info about assets of an input does not match its commitment"); -/// Error Message: "verification base fee must fit into a u32" -pub const ERR_PROLOGUE_VERIFICATION_BASE_FEE_MUST_BE_U32: MasmError = MasmError::from_static_str("verification base fee must fit into a u32"); - -/// Error Message: "transaction expiration block delta must be within 0x1 and 0xFFFF" -pub const ERR_TX_INVALID_EXPIRATION_DELTA: MasmError = MasmError::from_static_str("transaction expiration block delta must be within 0x1 and 0xFFFF"); -/// Error Message: "number of output notes in the transaction exceeds the maximum limit of 1024" -pub const ERR_TX_NUMBER_OF_OUTPUT_NOTES_EXCEEDS_LIMIT: MasmError = MasmError::from_static_str("number of output notes in the transaction exceeds the maximum 
limit of 1024"); -/// Error Message: "the transaction script is missing" -pub const ERR_TX_TRANSACTION_SCRIPT_IS_MISSING: MasmError = MasmError::from_static_str("the transaction script is missing"); - -/// Error Message: "failed to add fungible asset to the asset vault due to the initial value being invalid" -pub const ERR_VAULT_ADD_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID: MasmError = MasmError::from_static_str("failed to add fungible asset to the asset vault due to the initial value being invalid"); -/// Error Message: "failed to remove the fungible asset from the vault since the amount of the asset in the vault is less than the amount to remove" -pub const ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW: MasmError = MasmError::from_static_str("failed to remove the fungible asset from the vault since the amount of the asset in the vault is less than the amount to remove"); -/// Error Message: "adding the fungible asset to the vault would exceed the max amount of 9223372036854775807" -pub const ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED: MasmError = MasmError::from_static_str("adding the fungible asset to the vault would exceed the max amount of 9223372036854775807"); -/// Error Message: "the non-fungible asset already exists in the asset vault" -pub const ERR_VAULT_NON_FUNGIBLE_ASSET_ALREADY_EXISTS: MasmError = MasmError::from_static_str("the non-fungible asset already exists in the asset vault"); -/// Error Message: "failed to remove non-existent non-fungible asset from the vault" -pub const ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND: MasmError = MasmError::from_static_str("failed to remove non-existent non-fungible asset from the vault"); -/// Error Message: "failed to remove fungible asset from the asset vault due to the initial value being invalid" -pub const ERR_VAULT_REMOVE_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID: MasmError = MasmError::from_static_str("failed to remove fungible asset from the asset vault due to the initial value being 
invalid"); diff --git a/crates/miden-protocol/src/lib.rs b/crates/miden-protocol/src/lib.rs index 9047f49bf4..db3843aa68 100644 --- a/crates/miden-protocol/src/lib.rs +++ b/crates/miden-protocol/src/lib.rs @@ -27,9 +27,9 @@ mod constants; pub use constants::*; pub use miden_core::mast::{MastForest, MastNodeId}; pub use miden_core::prettier::PrettyPrint; -pub use miden_core::{EMPTY_WORD, Felt, FieldElement, ONE, StarkField, WORD_SIZE, ZERO}; +pub use miden_core::{EMPTY_WORD, Felt, ONE, WORD_SIZE, ZERO, field}; pub use miden_core_lib::CoreLibrary; -pub use miden_crypto::hash::rpo::Rpo256 as Hasher; +pub use miden_crypto::hash::poseidon2::Poseidon2 as Hasher; pub use miden_crypto::word; pub use miden_crypto::word::{LexicographicWord, Word, WordError}; pub use protocol::ProtocolLib; @@ -66,20 +66,23 @@ pub mod utils { pub use miden_utils_sync as sync; pub mod serde { - pub use miden_core::utils::{ + pub use miden_crypto::utils::{ + BudgetedReader, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, + SliceReader, }; } } pub mod vm { pub use miden_assembly_syntax::ast::{AttributeSet, QualifiedProcedureName}; - pub use miden_core::sys_events::SystemEvent; - pub use miden_core::{AdviceMap, EventId, Program, ProgramInfo}; + pub use miden_core::advice::{AdviceInputs, AdviceMap}; + pub use miden_core::events::{EventId, EventName, SystemEvent}; + pub use miden_core::program::{Program, ProgramInfo}; pub use miden_mast_package::{ MastArtifact, Package, @@ -90,6 +93,7 @@ pub mod vm { Section, SectionId, }; - pub use miden_processor::{AdviceInputs, FutureMaybeSend, RowIndex, StackInputs, StackOutputs}; + pub use miden_processor::trace::RowIndex; + pub use miden_processor::{FutureMaybeSend, StackInputs, StackOutputs}; pub use miden_verifier::ExecutionProof; } diff --git a/crates/miden-protocol/src/note/assets.rs b/crates/miden-protocol/src/note/assets.rs index 260274eeaa..d8f0ec17f9 100644 --- a/crates/miden-protocol/src/note/assets.rs +++ 
b/crates/miden-protocol/src/note/assets.rs @@ -1,5 +1,7 @@ use alloc::vec::Vec; +use miden_crypto::SequentialCommit; + use crate::asset::{Asset, FungibleAsset, NonFungibleAsset}; use crate::errors::NoteError; use crate::utils::serde::{ @@ -9,13 +11,14 @@ use crate::utils::serde::{ DeserializationError, Serializable, }; -use crate::{Felt, Hasher, MAX_ASSETS_PER_NOTE, WORD_SIZE, Word, ZERO}; +use crate::{Felt, Hasher, MAX_ASSETS_PER_NOTE, WORD_SIZE, Word}; // NOTE ASSETS // ================================================================================================ + /// An asset container for a note. /// -/// A note can contain between 0 and 256 assets. No duplicates are allowed, but the order of assets +/// A note can contain between 0 and 255 assets. No duplicates are allowed, but the order of assets /// is unspecified. /// /// All the assets in a note can be reduced to a single commitment which is computed by @@ -24,7 +27,7 @@ use crate::{Felt, Hasher, MAX_ASSETS_PER_NOTE, WORD_SIZE, Word, ZERO}; #[derive(Debug, Default, Clone)] pub struct NoteAssets { assets: Vec, - hash: Word, + commitment: Word, } impl NoteAssets { @@ -60,8 +63,9 @@ impl NoteAssets { } } - let hash = compute_asset_commitment(&assets); - Ok(Self { assets, hash }) + let commitment = to_commitment(&assets); + + Ok(Self { assets, commitment }) } // PUBLIC ACCESSORS @@ -69,7 +73,7 @@ impl NoteAssets { /// Returns a commitment to the note's assets. pub fn commitment(&self) -> Word { - self.hash + self.commitment } /// Returns the number of assets. @@ -88,27 +92,8 @@ impl NoteAssets { } /// Returns all assets represented as a vector of field elements. - /// - /// The vector is padded with ZEROs so that its length is a multiple of 8. This is useful - /// because hashing the returned elements results in the note asset commitment. - pub fn to_padded_assets(&self) -> Vec { - // if we have an odd number of assets with pad with a single word. 
- let padded_len = if self.assets.len().is_multiple_of(2) { - self.assets.len() * WORD_SIZE - } else { - (self.assets.len() + 1) * WORD_SIZE - }; - - // allocate a vector to hold the padded assets - let mut padded_assets = Vec::with_capacity(padded_len * WORD_SIZE); - - // populate the vector with the assets - padded_assets.extend(self.assets.iter().flat_map(|asset| Word::from(*asset))); - - // pad with an empty word if we have an odd number of assets - padded_assets.resize(padded_len, ZERO); - - padded_assets + pub fn to_elements(&self) -> Vec { + ::to_elements(self) } /// Returns an iterator over all [`FungibleAsset`]. @@ -127,45 +112,9 @@ impl NoteAssets { }) } - // STATE MUTATORS - // -------------------------------------------------------------------------------------------- - - /// Adds the provided asset to this list of note assets. - /// - /// # Errors - /// Returns an error if: - /// - The same non-fungible asset is already in the list. - /// - A fungible asset issued by the same faucet exists in the list and adding both assets - /// together results in an invalid asset. - /// - Adding the asset to the list will push the list beyond the [Self::MAX_NUM_ASSETS] limit. 
- pub fn add_asset(&mut self, asset: Asset) -> Result<(), NoteError> { - // check if the asset issued by the faucet as the provided asset already exists in the - // list of assets - if let Some(own_asset) = self.assets.iter_mut().find(|a| a.is_same(&asset)) { - match own_asset { - Asset::Fungible(f_own_asset) => { - // if a fungible asset issued by the same faucet is found, try to add the - // the provided asset to it - let new_asset = f_own_asset - .add(asset.unwrap_fungible()) - .map_err(NoteError::AddFungibleAssetBalanceError)?; - *own_asset = Asset::Fungible(new_asset); - }, - Asset::NonFungible(nf_asset) => { - return Err(NoteError::DuplicateNonFungibleAsset(*nf_asset)); - }, - } - } else { - // if the asset is not in the list, add it to the list - self.assets.push(asset); - if self.assets.len() > Self::MAX_NUM_ASSETS { - return Err(NoteError::TooManyAssets(self.assets.len())); - } - } - - self.hash = compute_asset_commitment(&self.assets); - - Ok(()) + /// Consumes self and returns the underlying vector of assets. + pub fn into_vec(self) -> Vec { + self.assets } } @@ -177,42 +126,28 @@ impl PartialEq for NoteAssets { impl Eq for NoteAssets {} -// HELPER FUNCTIONS -// ================================================================================================ +impl SequentialCommit for NoteAssets { + type Commitment = Word; -/// Returns a commitment to a note's assets. -/// -/// The commitment is computed as a sequential hash of all assets (each asset represented by 4 -/// field elements), padded to the next multiple of 2. If the asset list is empty, a default digest -/// is returned. -fn compute_asset_commitment(assets: &[Asset]) -> Word { - if assets.is_empty() { - return Word::empty(); + /// Returns all assets represented as a vector of field elements. + fn to_elements(&self) -> Vec { + to_elements(&self.assets) } - // If we have an odd number of assets we pad the vector with 4 zero elements. 
This is to - // ensure the number of elements is a multiple of 8 - the size of the hasher rate. - let word_capacity = if assets.len().is_multiple_of(2) { - assets.len() - } else { - assets.len() + 1 - }; - let mut asset_elements = Vec::with_capacity(word_capacity * WORD_SIZE); - - for asset in assets.iter() { - // convert the asset into field elements and add them to the list elements - let asset_word: Word = (*asset).into(); - asset_elements.extend_from_slice(asset_word.as_elements()); + /// Computes the commitment to the assets. + fn to_commitment(&self) -> Self::Commitment { + to_commitment(&self.assets) } +} - // If we have an odd number of assets we pad the vector with 4 zero elements. This is to - // ensure the number of elements is a multiple of 8 - the size of the hasher rate. This - // simplifies hashing inside of the virtual machine when ingesting assets from a note. - if assets.len() % 2 == 1 { - asset_elements.extend_from_slice(Word::empty().as_elements()); - } +fn to_elements(assets: &[Asset]) -> Vec { + let mut elements = Vec::with_capacity(assets.len() * 2 * WORD_SIZE); + elements.extend(assets.iter().flat_map(Asset::as_elements)); + elements +} - Hasher::hash_elements(&asset_elements) +fn to_commitment(assets: &[Asset]) -> Word { + Hasher::hash_elements(&to_elements(assets)) } // SERIALIZATION @@ -225,12 +160,21 @@ impl Serializable for NoteAssets { target.write_u8(self.assets.len().try_into().expect("Asset number must fit into `u8`")); target.write_many(&self.assets); } + + fn get_size_hint(&self) -> usize { + // Size of the serialized asset count prefix. 
+ let u8_size = 0u8.get_size_hint(); + + let assets_size: usize = self.assets.iter().map(|asset| asset.get_size_hint()).sum(); + + u8_size + assets_size + } } impl Deserializable for NoteAssets { fn read_from(source: &mut R) -> Result { let count = source.read_u8()?; - let assets = source.read_many::(count.into())?; + let assets = source.read_many_iter::(count.into())?.collect::>()?; Self::new(assets).map_err(|e| DeserializationError::InvalidValue(format!("{e:?}"))) } } @@ -240,8 +184,7 @@ impl Deserializable for NoteAssets { #[cfg(test)] mod tests { - use super::{NoteAssets, compute_asset_commitment}; - use crate::Word; + use super::NoteAssets; use crate::account::AccountId; use crate::asset::{Asset, FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}; use crate::testing::account_id::{ @@ -250,35 +193,12 @@ mod tests { ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, }; - #[test] - fn add_asset() { - let faucet_id = AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET).unwrap(); - - let asset1 = Asset::Fungible(FungibleAsset::new(faucet_id, 100).unwrap()); - let asset2 = Asset::Fungible(FungibleAsset::new(faucet_id, 50).unwrap()); - - // create empty assets - let mut assets = NoteAssets::default(); - - assert_eq!(assets.hash, Word::empty()); - - // add asset1 - assert!(assets.add_asset(asset1).is_ok()); - assert_eq!(assets.assets, vec![asset1]); - assert_eq!(assets.hash, compute_asset_commitment(&[asset1])); - - // add asset2 - assert!(assets.add_asset(asset2).is_ok()); - let expected_asset = Asset::Fungible(FungibleAsset::new(faucet_id, 150).unwrap()); - assert_eq!(assets.assets, vec![expected_asset]); - assert_eq!(assets.hash, compute_asset_commitment(&[expected_asset])); - } #[test] fn iter_fungible_asset() { let faucet_id_1 = AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET).unwrap(); let faucet_id_2 = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET).unwrap(); let account_id = AccountId::try_from(ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET).unwrap(); - let 
details = NonFungibleAssetDetails::new(account_id.prefix(), vec![1, 2, 3]).unwrap(); + let details = NonFungibleAssetDetails::new(account_id, vec![1, 2, 3]).unwrap(); let asset1 = Asset::Fungible(FungibleAsset::new(faucet_id_1, 100).unwrap()); let asset2 = Asset::Fungible(FungibleAsset::new(faucet_id_2, 50).unwrap()); diff --git a/crates/miden-protocol/src/note/attachment.rs b/crates/miden-protocol/src/note/attachment.rs index 56a46e4b7d..fa8e567341 100644 --- a/crates/miden-protocol/src/note/attachment.rs +++ b/crates/miden-protocol/src/note/attachment.rs @@ -3,7 +3,13 @@ use alloc::vec::Vec; use crate::crypto::SequentialCommit; use crate::errors::NoteError; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Felt, Hasher, Word}; // NOTE ATTACHMENT @@ -110,6 +116,10 @@ impl Serializable for NoteAttachment { self.attachment_scheme().write_into(target); self.content().write_into(target); } + + fn get_size_hint(&self) -> usize { + self.attachment_scheme().get_size_hint() + self.content().get_size_hint() + } } impl Deserializable for NoteAttachment { @@ -211,6 +221,19 @@ impl Serializable for NoteAttachmentContent { }, } } + + fn get_size_hint(&self) -> usize { + let kind_size = self.attachment_kind().get_size_hint(); + match self { + NoteAttachmentContent::None => kind_size, + NoteAttachmentContent::Word(word) => kind_size + word.get_size_hint(), + NoteAttachmentContent::Array(attachment_commitment) => { + kind_size + + attachment_commitment.num_elements().get_size_hint() + + attachment_commitment.elements.len() * crate::ZERO.get_size_hint() + }, + } + } } impl Deserializable for NoteAttachmentContent { @@ -225,7 +248,8 @@ impl Deserializable for NoteAttachmentContent { }, NoteAttachmentKind::Array => { let num_elements = u16::read_from(source)?; - let elements = source.read_many(num_elements as 
usize)?; + let elements = + source.read_many_iter(num_elements as usize)?.collect::>()?; Self::new_array(elements) .map_err(|err| DeserializationError::InvalidValue(err.to_string())) }, @@ -374,6 +398,10 @@ impl Serializable for NoteAttachmentScheme { fn write_into(&self, target: &mut W) { self.as_u32().write_into(target); } + + fn get_size_hint(&self) -> usize { + core::mem::size_of::() + } } impl Deserializable for NoteAttachmentScheme { @@ -464,6 +492,10 @@ impl Serializable for NoteAttachmentKind { fn write_into(&self, target: &mut W) { self.as_u8().write_into(target); } + + fn get_size_hint(&self) -> usize { + core::mem::size_of::() + } } impl Deserializable for NoteAttachmentKind { diff --git a/crates/miden-protocol/src/note/details.rs b/crates/miden-protocol/src/note/details.rs index f913f6ade6..14bad33b70 100644 --- a/crates/miden-protocol/src/note/details.rs +++ b/crates/miden-protocol/src/note/details.rs @@ -1,8 +1,12 @@ -use miden_processor::DeserializationError; - use super::{NoteAssets, NoteId, NoteRecipient, NoteScript, NoteStorage, Nullifier}; use crate::Word; -use crate::utils::serde::{ByteReader, ByteWriter, Deserializable, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // NOTE DETAILS // ================================================================================================ @@ -67,6 +71,14 @@ impl NoteDetails { Nullifier::from(self) } + // MUTATORS + // -------------------------------------------------------------------------------------------- + + /// Reduces the size of the note script by stripping all debug info from it. + pub fn minify_script(&mut self) { + self.recipient.minify_script(); + } + /// Decomposes note details into underlying assets and recipient. 
pub fn into_parts(self) -> (NoteAssets, NoteRecipient) { (self.assets, self.recipient) @@ -92,6 +104,10 @@ impl Serializable for NoteDetails { assets.write_into(target); recipient.write_into(target); } + + fn get_size_hint(&self) -> usize { + self.assets.get_size_hint() + self.recipient.get_size_hint() + } } impl Deserializable for NoteDetails { diff --git a/crates/miden-protocol/src/note/file.rs b/crates/miden-protocol/src/note/file.rs index 4a0d21001b..44aac4ddfe 100644 --- a/crates/miden-protocol/src/note/file.rs +++ b/crates/miden-protocol/src/note/file.rs @@ -6,13 +6,17 @@ use std::{ vec::Vec, }; -#[cfg(feature = "std")] -use miden_core::utils::SliceReader; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; - use super::{Note, NoteDetails, NoteId, NoteInclusionProof, NoteTag}; use crate::block::BlockNumber; +#[cfg(feature = "std")] +use crate::utils::serde::SliceReader; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; const MAGIC: &str = "note"; @@ -137,8 +141,6 @@ impl Deserializable for NoteFile { mod tests { use alloc::vec::Vec; - use miden_core::utils::{Deserializable, Serializable}; - use crate::Word; use crate::account::AccountId; use crate::asset::{Asset, FungibleAsset}; @@ -159,6 +161,7 @@ mod tests { ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE, }; + use crate::utils::serde::{Deserializable, Serializable}; fn create_example_note() -> Note { let faucet = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET).unwrap(); diff --git a/crates/miden-protocol/src/note/header.rs b/crates/miden-protocol/src/note/header.rs index 04aac21de4..f0ca1c1265 100644 --- a/crates/miden-protocol/src/note/header.rs +++ b/crates/miden-protocol/src/note/header.rs @@ -77,6 +77,10 @@ impl Serializable for NoteHeader { self.note_id.write_into(target); self.note_metadata.write_into(target); } + + fn 
get_size_hint(&self) -> usize { + self.note_id.get_size_hint() + self.note_metadata.get_size_hint() + } } impl Deserializable for NoteHeader { diff --git a/crates/miden-protocol/src/note/metadata.rs b/crates/miden-protocol/src/note/metadata.rs index d21afc4c3a..04c36b9c08 100644 --- a/crates/miden-protocol/src/note/metadata.rs +++ b/crates/miden-protocol/src/note/metadata.rs @@ -43,8 +43,8 @@ use crate::note::{NoteAttachment, NoteAttachmentKind, NoteAttachmentScheme}; /// The felt validity of each part of the layout is guaranteed: /// - 1st felt: The lower 8 bits of the account ID suffix are `0` by construction, so that they can /// be overwritten with other data. The suffix' most significant bit must be zero such that the -/// entire felt retains its validity even if all of its lower 8 bits are be set to `1`. So the -/// note type can be comfortably encoded. +/// entire felt retains its validity even if all of its lower 8 bits are set to `1`. So the note +/// type can be comfortably encoded. /// - 2nd felt: Is equivalent to the prefix of the account ID so it inherits its validity. /// - 3rd felt: The upper 32 bits are always zero. /// - 4th felt: The upper 30 bits are always zero. @@ -89,6 +89,38 @@ impl NoteMetadata { } } + /// Reconstructs a [`NoteMetadata`] from a [`NoteMetadataHeader`] and a + /// [`NoteAttachment`]. + /// + /// # Errors + /// + /// Returns an error if the attachment's kind or scheme do not match those in the header. 
+ pub fn try_from_header( + header: NoteMetadataHeader, + attachment: NoteAttachment, + ) -> Result { + if header.attachment_kind != attachment.attachment_kind() { + return Err(NoteError::AttachmentKindMismatch { + header_kind: header.attachment_kind, + attachment_kind: attachment.attachment_kind(), + }); + } + + if header.attachment_scheme != attachment.attachment_scheme() { + return Err(NoteError::AttachmentSchemeMismatch { + header_scheme: header.attachment_scheme, + attachment_scheme: attachment.attachment_scheme(), + }); + } + + Ok(Self { + sender: header.sender, + note_type: header.note_type, + tag: header.tag, + attachment, + }) + } + // ACCESSORS // -------------------------------------------------------------------------------------------- @@ -120,7 +152,7 @@ impl NoteMetadata { /// Returns the header of a [`NoteMetadata`] as a [`Word`]. /// /// See [`NoteMetadata`] docs for more details. - fn to_header(&self) -> NoteMetadataHeader { + pub fn to_header(&self) -> NoteMetadataHeader { NoteMetadataHeader { sender: self.sender, note_type: self.note_type, @@ -193,6 +225,13 @@ impl Serializable for NoteMetadata { self.tag().write_into(target); self.attachment().write_into(target); } + + fn get_size_hint(&self) -> usize { + self.note_type().get_size_hint() + + self.sender().get_size_hint() + + self.tag().get_size_hint() + + self.attachment().get_size_hint() + } } impl Deserializable for NoteMetadata { @@ -212,10 +251,8 @@ impl Deserializable for NoteMetadata { /// The header representation of [`NoteMetadata`]. /// /// See the metadata's type for details on this type's [`Word`] layout. -/// -/// This is intended to be a private type meant for encapsulating the conversion from and to words. 
#[derive(Clone, Copy, Debug, Eq, PartialEq)] -struct NoteMetadataHeader { +pub struct NoteMetadataHeader { sender: AccountId, note_type: NoteType, tag: NoteTag, @@ -223,6 +260,36 @@ struct NoteMetadataHeader { attachment_scheme: NoteAttachmentScheme, } +impl NoteMetadataHeader { + // ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the account which created the note. + pub fn sender(&self) -> AccountId { + self.sender + } + + /// Returns the note's type. + pub fn note_type(&self) -> NoteType { + self.note_type + } + + /// Returns the tag associated with the note. + pub fn tag(&self) -> NoteTag { + self.tag + } + + /// Returns the attachment kind. + pub fn attachment_kind(&self) -> NoteAttachmentKind { + self.attachment_kind + } + + /// Returns the attachment scheme. + pub fn attachment_scheme(&self) -> NoteAttachmentScheme { + self.attachment_scheme + } +} + impl From for Word { fn from(header: NoteMetadataHeader) -> Self { let mut metadata = Word::empty(); @@ -244,14 +311,18 @@ impl TryFrom for NoteMetadataHeader { fn try_from(word: Word) -> Result { let (sender_suffix, note_type) = unmerge_sender_suffix_and_note_type(word[0])?; let sender_prefix = word[1]; - let tag = u32::try_from(word[2]).map(NoteTag::new).map_err(|_| { + let tag = u32::try_from(word[2].as_canonical_u64()).map(NoteTag::new).map_err(|_| { NoteError::other("failed to convert note tag from metadata header to u32") })?; let (attachment_kind, attachment_scheme) = unmerge_attachment_kind_scheme(word[3])?; - let sender = AccountId::try_from([sender_prefix, sender_suffix]).map_err(|source| { - NoteError::other_with_source("failed to decode account ID from metadata header", source) - })?; + let sender = + AccountId::try_from_elements(sender_suffix, sender_prefix).map_err(|source| { + NoteError::other_with_source( + "failed to decode account ID from metadata header", + source, + ) + })?; Ok(Self { sender, @@ -279,7 +350,7 @@ impl 
TryFrom for NoteMetadataHeader { /// /// The `sender_id_suffix` is the suffix of the sender's account ID. fn merge_sender_suffix_and_note_type(sender_id_suffix: Felt, note_type: NoteType) -> Felt { - let mut merged = sender_id_suffix.as_int(); + let mut merged = sender_id_suffix.as_canonical_u64(); let note_type_byte = note_type as u8; debug_assert!(note_type_byte < 4, "note type must not contain values >= 4"); @@ -296,14 +367,14 @@ fn unmerge_sender_suffix_and_note_type(element: Felt) -> Result<(Felt, NoteType) // Inverts the note type mask. const SENDER_SUFFIX_MASK: u64 = !(NOTE_TYPE_MASK as u64); - let note_type_byte = element.as_int() as u8 & NOTE_TYPE_MASK; + let note_type_byte = element.as_canonical_u64() as u8 & NOTE_TYPE_MASK; let note_type = NoteType::try_from(note_type_byte).map_err(|source| { NoteError::other_with_source("failed to decode note type from metadata header", source) })?; // No bits were set so felt should still be valid. - let sender_suffix = - Felt::try_from(element.as_int() & SENDER_SUFFIX_MASK).expect("felt should still be valid"); + let sender_suffix = Felt::try_from(element.as_canonical_u64() & SENDER_SUFFIX_MASK) + .expect("felt should still be valid"); Ok((sender_suffix, note_type)) } @@ -331,8 +402,8 @@ fn merge_attachment_kind_scheme( fn unmerge_attachment_kind_scheme( element: Felt, ) -> Result<(NoteAttachmentKind, NoteAttachmentScheme), NoteError> { - let attachment_scheme = element.as_int() as u32; - let attachment_kind = (element.as_int() >> 32) as u8; + let attachment_scheme = element.as_canonical_u64() as u32; + let attachment_kind = (element.as_canonical_u64() >> 32) as u8; let attachment_scheme = NoteAttachmentScheme::new(attachment_scheme); let attachment_kind = NoteAttachmentKind::try_from(attachment_kind).map_err(|source| { diff --git a/crates/miden-protocol/src/note/mod.rs b/crates/miden-protocol/src/note/mod.rs index 3c09e7780f..f209a07a71 100644 --- a/crates/miden-protocol/src/note/mod.rs +++ 
b/crates/miden-protocol/src/note/mod.rs @@ -1,10 +1,15 @@ use miden_crypto::Word; -use miden_crypto::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; use crate::account::AccountId; use crate::errors::NoteError; -use crate::{Felt, Hasher, WORD_SIZE, ZERO}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Hasher, ZERO}; mod assets; pub use assets::NoteAssets; @@ -19,7 +24,7 @@ mod storage; pub use storage::NoteStorage; mod metadata; -pub use metadata::NoteMetadata; +pub use metadata::{NoteMetadata, NoteMetadataHeader}; mod attachment; pub use attachment::{ @@ -163,6 +168,21 @@ impl Note { pub fn commitment(&self) -> Word { self.header.commitment() } + + // MUTATORS + // -------------------------------------------------------------------------------------------- + + /// Reduces the size of the note script by stripping all debug info from it. + pub fn minify_script(&mut self) { + self.details.minify_script(); + } + + /// Consumes self and returns the underlying parts of the [`Note`]. 
+ pub fn into_parts(self) -> (NoteAssets, NoteMetadata, NoteRecipient) { + let (assets, recipient) = self.details.into_parts(); + let metadata = self.header.into_metadata(); + (assets, metadata, recipient) + } } // AS REF @@ -219,6 +239,10 @@ impl Serializable for Note { header.metadata().write_into(target); details.write_into(target); } + + fn get_size_hint(&self) -> usize { + self.header.metadata().get_size_hint() + self.details.get_size_hint() + } } impl Deserializable for Note { diff --git a/crates/miden-protocol/src/note/note_id.rs b/crates/miden-protocol/src/note/note_id.rs index 1d522757de..343285a81e 100644 --- a/crates/miden-protocol/src/note/note_id.rs +++ b/crates/miden-protocol/src/note/note_id.rs @@ -71,6 +71,10 @@ impl Serializable for NoteId { fn write_into(&self, target: &mut W) { target.write_bytes(&self.0.to_bytes()); } + + fn get_size_hint(&self) -> usize { + Word::SERIALIZED_SIZE + } } impl Deserializable for NoteId { diff --git a/crates/miden-protocol/src/note/note_tag.rs b/crates/miden-protocol/src/note/note_tag.rs index 1378ab2d77..2611f6988b 100644 --- a/crates/miden-protocol/src/note/note_tag.rs +++ b/crates/miden-protocol/src/note/note_tag.rs @@ -156,6 +156,10 @@ impl Serializable for NoteTag { fn write_into(&self, target: &mut W) { self.as_u32().write_into(target); } + + fn get_size_hint(&self) -> usize { + core::mem::size_of::() + } } impl Deserializable for NoteTag { diff --git a/crates/miden-protocol/src/note/note_type.rs b/crates/miden-protocol/src/note/note_type.rs index 86b492786d..d72b953f86 100644 --- a/crates/miden-protocol/src/note/note_type.rs +++ b/crates/miden-protocol/src/note/note_type.rs @@ -85,7 +85,7 @@ impl TryFrom for NoteType { type Error = NoteError; fn try_from(value: Felt) -> Result { - value.as_int().try_into() + value.as_canonical_u64().try_into() } } @@ -108,6 +108,10 @@ impl Serializable for NoteType { fn write_into(&self, target: &mut W) { (*self as u8).write_into(target) } + + fn get_size_hint(&self) -> usize 
{ + core::mem::size_of::() + } } impl Deserializable for NoteType { diff --git a/crates/miden-protocol/src/note/nullifier.rs b/crates/miden-protocol/src/note/nullifier.rs index 1f3f3f4736..2f728b4123 100644 --- a/crates/miden-protocol/src/note/nullifier.rs +++ b/crates/miden-protocol/src/note/nullifier.rs @@ -1,6 +1,7 @@ use alloc::string::String; use core::fmt::{Debug, Display, Formatter}; +use miden_core::WORD_SIZE; use miden_crypto::WordError; use miden_protocol_macros::WordWrapper; @@ -13,7 +14,6 @@ use super::{ Hasher, NoteDetails, Serializable, - WORD_SIZE, Word, ZERO, }; @@ -65,7 +65,7 @@ impl Nullifier { /// /// Nullifier prefix is defined as the 16 most significant bits of the nullifier value. pub fn prefix(&self) -> u16 { - (self.as_word()[3].as_int() >> NULLIFIER_PREFIX_SHIFT) as u16 + (self.as_word()[3].as_canonical_u64() >> NULLIFIER_PREFIX_SHIFT) as u16 } /// Creates a Nullifier from a hex string. Assumes that the string starts with "0x" and @@ -78,8 +78,6 @@ impl Nullifier { #[cfg(any(feature = "testing", test))] pub fn dummy(n: u64) -> Self { - use miden_core::FieldElement; - Self(Word::new([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(n)])) } } @@ -117,6 +115,10 @@ impl Serializable for Nullifier { fn write_into(&self, target: &mut W) { target.write_bytes(&self.0.to_bytes()); } + + fn get_size_hint(&self) -> usize { + Word::SERIALIZED_SIZE + } } impl Deserializable for Nullifier { diff --git a/crates/miden-protocol/src/note/partial.rs b/crates/miden-protocol/src/note/partial.rs index f7aea1bcc6..2cfd911c6a 100644 --- a/crates/miden-protocol/src/note/partial.rs +++ b/crates/miden-protocol/src/note/partial.rs @@ -75,6 +75,10 @@ impl Serializable for PartialNote { self.recipient_digest.write_into(target); self.assets.write_into(target) } + + fn get_size_hint(&self) -> usize { + self.metadata().get_size_hint() + Word::SERIALIZED_SIZE + self.assets.get_size_hint() + } } impl Deserializable for PartialNote { diff --git 
a/crates/miden-protocol/src/note/recipient.rs b/crates/miden-protocol/src/note/recipient.rs index 9948cfeb39..36e20db762 100644 --- a/crates/miden-protocol/src/note/recipient.rs +++ b/crates/miden-protocol/src/note/recipient.rs @@ -60,6 +60,19 @@ impl NoteRecipient { pub fn digest(&self) -> Word { self.digest } + + // MUTATORS + // -------------------------------------------------------------------------------------------- + + /// Reduces the size of the note script by stripping all debug info from it. + pub fn minify_script(&mut self) { + self.script.clear_debug_info(); + } + + /// Consumes self and returns the underlying parts of the [`NoteRecipient`]. + pub fn into_parts(self) -> (Word, NoteScript, NoteStorage) { + (self.serial_num, self.script, self.storage) + } } fn compute_recipient_digest(serial_num: Word, script: &NoteScript, storage: &NoteStorage) -> Word { @@ -87,6 +100,10 @@ impl Serializable for NoteRecipient { storage.write_into(target); serial_num.write_into(target); } + + fn get_size_hint(&self) -> usize { + self.script.get_size_hint() + self.storage.get_size_hint() + Word::SERIALIZED_SIZE + } } impl Deserializable for NoteRecipient { diff --git a/crates/miden-protocol/src/note/script.rs b/crates/miden-protocol/src/note/script.rs index f9ee5e628a..8b54ce67be 100644 --- a/crates/miden-protocol/src/note/script.rs +++ b/crates/miden-protocol/src/note/script.rs @@ -2,8 +2,10 @@ use alloc::string::ToString; use alloc::sync::Arc; use alloc::vec::Vec; use core::fmt::Display; +use core::num::TryFromIntError; -use miden_processor::MastNodeExt; +use miden_core::mast::MastNodeExt; +use miden_mast_package::{MastArtifact, Package}; use super::Felt; use crate::assembly::mast::{ExternalNodeBuilder, MastForest, MastForestContributor, MastNodeId}; @@ -138,6 +140,35 @@ impl NoteScript { Ok(Self { mast: Arc::new(mast), entrypoint }) } + /// Creates an [`NoteScript`] from a [`Package`]. 
+ /// + /// # Arguments + /// + /// * `package` - The package containing the + /// [`Executable`](miden_mast_package::MastArtifact::Executable) or + /// [`Library`](miden_mast_package::MastArtifact::Library). + /// + /// # Errors + /// + /// Returns an error if: + /// - The package contains a library which does not contain a procedure with the `@note_script` + /// attribute. + /// - The package contains a library which contains multiple procedures with the `@note_script` + /// attribute. + pub fn from_package(package: &Package) -> Result { + match &package.mast { + // `NoteScript`s are compiled as executables by the miden compiler's + // cargo extension. Source, the "midenc_flags_from_target" function: + // https://github.com/0xMiden/compiler/blob/d3cd8cd4a2c1dfeae8a61643aa42734a35e3e840/tools/cargo-miden/src/commands/build.rs#L334 + MastArtifact::Executable(executable) => { + let program = executable.as_ref().clone(); + + Ok(NoteScript::new(program)) + }, + MastArtifact::Library(library) => Ok(NoteScript::from_library(library))?, + } + } + // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- @@ -156,6 +187,16 @@ impl NoteScript { self.entrypoint } + /// Clears all debug info from this script's [`MastForest`]: decorators, error codes, and + /// procedure names. + /// + /// See [`MastForest::clear_debug_info`] for more details. + pub fn clear_debug_info(&mut self) { + let mut mast = self.mast.clone(); + Arc::make_mut(&mut mast).clear_debug_info(); + self.mast = mast; + } + /// Returns a new [NoteScript] with the provided advice map entries merged into the /// underlying [MastForest]. 
/// @@ -232,16 +273,23 @@ impl TryFrom<&[Felt]> for NoteScript { return Err(DeserializationError::UnexpectedEOF); } - let entrypoint: u32 = elements[0].try_into().map_err(DeserializationError::InvalidValue)?; - let len = elements[1].as_int(); + let entrypoint: u32 = elements[0] + .as_canonical_u64() + .try_into() + .map_err(|err: TryFromIntError| DeserializationError::InvalidValue(err.to_string()))?; + let len = elements[1].as_canonical_u64(); let mut data = Vec::with_capacity(elements.len() * 4); for &felt in &elements[2..] { - let v: u32 = felt.try_into().map_err(DeserializationError::InvalidValue)?; - data.extend(v.to_le_bytes()) + let element: u32 = + felt.as_canonical_u64().try_into().map_err(|err: TryFromIntError| { + DeserializationError::InvalidValue(err.to_string()) + })?; + data.extend(element.to_le_bytes()) } data.shrink_to(len as usize); + // TODO: Use UntrustedMastForest and check where else we deserialize mast forests. let mast = MastForest::read_from_bytes(&data)?; let entrypoint = MastNodeId::from_u32_safe(entrypoint, &mast)?; Ok(NoteScript::from_parts(Arc::new(mast), entrypoint)) @@ -264,6 +312,16 @@ impl Serializable for NoteScript { self.mast.write_into(target); target.write_u32(u32::from(self.entrypoint)); } + + fn get_size_hint(&self) -> usize { + // TODO: this is a temporary workaround. Replace mast.to_bytes().len() with + // MastForest::get_size_hint() (or a similar size-hint API) once it becomes + // available. 
+ let mast_size = self.mast.to_bytes().len(); + let u32_size = 0u32.get_size_hint(); + + mast_size + u32_size + } } impl Deserializable for NoteScript { @@ -322,8 +380,8 @@ mod tests { #[test] fn test_note_script_to_from_felt() { let assembler = Assembler::default(); - let tx_script_src = DEFAULT_NOTE_CODE; - let program = assembler.assemble_program(tx_script_src).unwrap(); + let script_src = DEFAULT_NOTE_CODE; + let program = assembler.assemble_program(script_src).unwrap(); let note_script = NoteScript::new(program); let encoded: Vec = (¬e_script).into(); @@ -334,7 +392,9 @@ mod tests { #[test] fn test_note_script_with_advice_map() { - use miden_core::{AdviceMap, Word}; + use miden_core::advice::AdviceMap; + + use crate::Word; let assembler = Assembler::default(); let program = assembler.assemble_program("begin nop end").unwrap(); diff --git a/crates/miden-protocol/src/note/storage.rs b/crates/miden-protocol/src/note/storage.rs index 14567f843a..0b8b73a976 100644 --- a/crates/miden-protocol/src/note/storage.rs +++ b/crates/miden-protocol/src/note/storage.rs @@ -19,7 +19,7 @@ use crate::{Felt, Hasher, MAX_NOTE_STORAGE_ITEMS, Word}; /// field element. Thus, note storage can contain up to ~8 KB of data. /// /// All storage items associated with a note can be reduced to a single commitment which is -/// computed as an RPO256 hash over the storage elements. +/// computed as sequential hash over the storage elements. #[derive(Clone, Debug)] pub struct NoteStorage { items: Vec, @@ -121,12 +121,17 @@ impl Serializable for NoteStorage { target.write_u16(items.len().try_into().expect("storage items len is not a u16 value")); target.write_many(items); } + + fn get_size_hint(&self) -> usize { + // 2 bytes for u16 length + 8 bytes per Felt + 2 + self.items.len() * 8 + } } impl Deserializable for NoteStorage { fn read_from(source: &mut R) -> Result { let len = source.read_u16()? 
as usize; - let items = source.read_many::(len)?; + let items = source.read_many_iter(len)?.collect::, _>>()?; Self::new(items).map_err(|v| DeserializationError::InvalidValue(format!("{v}"))) } } diff --git a/crates/miden-protocol/src/testing/account_code.rs b/crates/miden-protocol/src/testing/account_code.rs index 6fecd9fe48..ea98aeb1cf 100644 --- a/crates/miden-protocol/src/testing/account_code.rs +++ b/crates/miden-protocol/src/testing/account_code.rs @@ -23,8 +23,7 @@ impl AccountCode { let library = Assembler::default() .assemble_library([CODE]) .expect("mock account component should assemble"); - let metadata = - AccountComponentMetadata::new("miden::testing::mock").with_supports_all_types(); + let metadata = AccountComponentMetadata::new("miden::testing::mock", AccountType::all()); let component = AccountComponent::new(library, vec![], metadata).unwrap(); Self::from_components( diff --git a/crates/miden-protocol/src/testing/add_component.rs b/crates/miden-protocol/src/testing/add_component.rs index 6d8712a231..98dd2b8629 100644 --- a/crates/miden-protocol/src/testing/add_component.rs +++ b/crates/miden-protocol/src/testing/add_component.rs @@ -1,5 +1,5 @@ -use crate::account::AccountComponent; use crate::account::component::AccountComponentMetadata; +use crate::account::{AccountComponent, AccountType}; use crate::assembly::{Assembler, Library}; use crate::utils::sync::LazyLock; @@ -25,9 +25,8 @@ pub struct AddComponent; impl From for AccountComponent { fn from(_: AddComponent) -> Self { - let metadata = AccountComponentMetadata::new("miden::testing::add") - .with_description("Add component for testing") - .with_supports_all_types(); + let metadata = AccountComponentMetadata::new("miden::testing::add", AccountType::all()) + .with_description("Add component for testing"); AccountComponent::new(ADD_LIBRARY.clone(), vec![], metadata) .expect("component should be valid") diff --git a/crates/miden-protocol/src/testing/asset.rs 
b/crates/miden-protocol/src/testing/asset.rs index b1b12223bd..a89be85545 100644 --- a/crates/miden-protocol/src/testing/asset.rs +++ b/crates/miden-protocol/src/testing/asset.rs @@ -1,97 +1,15 @@ -use rand::Rng; -use rand::distr::StandardUniform; - -use crate::account::{AccountId, AccountIdPrefix, AccountType}; +use crate::account::AccountId; use crate::asset::{Asset, FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}; -use crate::errors::AssetError; use crate::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, }; -/// Builder for an `NonFungibleAssetDetails`, the builder can be configured and used multiplied -/// times. -#[derive(Debug, Clone)] -pub struct NonFungibleAssetDetailsBuilder { - faucet_id: AccountIdPrefix, - rng: T, -} - -/// Builder for an `FungibleAsset`, the builder can be configured and used multiplied times. -#[derive(Debug, Clone)] -pub struct FungibleAssetBuilder { - faucet_id: AccountId, - amount: u64, -} - -impl NonFungibleAssetDetailsBuilder { - pub fn new(faucet_id: AccountIdPrefix, rng: T) -> Result { - if !matches!(faucet_id.account_type(), AccountType::NonFungibleFaucet) { - return Err(AssetError::NonFungibleFaucetIdTypeMismatch(faucet_id)); - } - - Ok(Self { faucet_id, rng }) - } - - pub fn build(&mut self) -> Result { - let data = (&mut self.rng).sample_iter(StandardUniform).take(5).collect(); - NonFungibleAssetDetails::new(self.faucet_id, data) - } -} - -/// Builder for an `NonFungibleAsset`, the builder can be configured and used multiplied times. 
-#[derive(Debug, Clone)] -pub struct NonFungibleAssetBuilder { - details_builder: NonFungibleAssetDetailsBuilder, -} - -impl NonFungibleAssetBuilder { - pub fn new(faucet_id: AccountIdPrefix, rng: T) -> Result { - let details_builder = NonFungibleAssetDetailsBuilder::new(faucet_id, rng)?; - Ok(Self { details_builder }) - } - - pub fn build(&mut self) -> Result { - let details = self.details_builder.build()?; - NonFungibleAsset::new(&details) - } -} - -impl FungibleAssetBuilder { - pub const DEFAULT_AMOUNT: u64 = 10; - - pub fn new(faucet_id: AccountId) -> Result { - let account_type = faucet_id.account_type(); - if !matches!(account_type, AccountType::FungibleFaucet) { - return Err(AssetError::FungibleFaucetIdTypeMismatch(faucet_id)); - } - - Ok(Self { faucet_id, amount: Self::DEFAULT_AMOUNT }) - } - - pub fn amount(&mut self, amount: u64) -> Result<&mut Self, AssetError> { - if amount > FungibleAsset::MAX_AMOUNT { - return Err(AssetError::FungibleAssetAmountTooBig(amount)); - } - - self.amount = amount; - Ok(self) - } - - pub fn with_amount(&self, amount: u64) -> Result { - FungibleAsset::new(self.faucet_id, amount) - } - - pub fn build(&self) -> Result { - FungibleAsset::new(self.faucet_id, self.amount) - } -} - impl NonFungibleAsset { /// Returns a mocked non-fungible asset, issued by [ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET]. 
pub fn mock(asset_data: &[u8]) -> Asset { let non_fungible_asset_details = NonFungibleAssetDetails::new( - AccountId::try_from(ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET).unwrap().prefix(), + AccountId::try_from(ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET).unwrap(), asset_data.to_vec(), ) .unwrap(); diff --git a/crates/miden-protocol/src/testing/block.rs b/crates/miden-protocol/src/testing/block.rs index dba54ec54a..cb4fbc446f 100644 --- a/crates/miden-protocol/src/testing/block.rs +++ b/crates/miden-protocol/src/testing/block.rs @@ -1,10 +1,10 @@ use miden_crypto::merkle::smt::Smt; #[cfg(not(target_family = "wasm"))] -use winter_rand_utils::rand_value; +use miden_crypto::rand::test_utils::rand_value; use crate::Word; use crate::account::Account; -use crate::block::account_tree::{AccountTree, account_id_to_smt_key}; +use crate::block::account_tree::{AccountIdKey, AccountTree}; use crate::block::{BlockHeader, BlockNumber, FeeParameters}; use crate::testing::account_id::ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET; use crate::testing::random_secret_key::random_secret_key; @@ -26,7 +26,7 @@ impl BlockHeader { let smt = Smt::with_entries( accounts .iter() - .map(|acct| (account_id_to_smt_key(acct.id()), acct.to_commitment())), + .map(|acct| (AccountIdKey::from(acct.id()).as_word(), acct.to_commitment())), ) .expect("failed to create account db"); let acct_db = AccountTree::new(smt).expect("failed to create account tree"); diff --git a/crates/miden-protocol/src/testing/component_metadata.rs b/crates/miden-protocol/src/testing/component_metadata.rs index f86e077421..2586d5e1dd 100644 --- a/crates/miden-protocol/src/testing/component_metadata.rs +++ b/crates/miden-protocol/src/testing/component_metadata.rs @@ -1,9 +1,10 @@ +use crate::account::AccountType; use crate::account::component::AccountComponentMetadata; impl AccountComponentMetadata { /// Creates a mock [`AccountComponentMetadata`] with the given name that supports all account /// types. 
pub fn mock(name: &str) -> Self { - AccountComponentMetadata::new(name).with_supports_all_types() + AccountComponentMetadata::new(name, AccountType::all()) } } diff --git a/crates/miden-protocol/src/testing/mock_util_lib.rs b/crates/miden-protocol/src/testing/mock_util_lib.rs deleted file mode 100644 index f9d454f5ee..0000000000 --- a/crates/miden-protocol/src/testing/mock_util_lib.rs +++ /dev/null @@ -1,47 +0,0 @@ -use miden_assembly::diagnostics::NamedSource; - -use crate::assembly::Library; -use crate::transaction::TransactionKernel; -use crate::utils::sync::LazyLock; - -const MOCK_UTIL_LIBRARY_CODE: &str = " - use miden::protocol::output_note - - #! Inputs: [] - #! Outputs: [note_idx] - pub proc create_default_note - push.1.2.3.4 # = RECIPIENT - push.2 # = NoteType::Private - push.0 # = NoteTag - # => [tag, note_type, RECIPIENT] - - exec.output_note::create - # => [note_idx] - end - - #! Inputs: [ASSET] - #! Outputs: [] - pub proc create_default_note_with_asset - exec.create_default_note - # => [note_idx, ASSET] - - movdn.4 - # => [ASSET, note_idx] - - exec.output_note::add_asset - # => [] - end -"; - -static MOCK_UTIL_LIBRARY: LazyLock = LazyLock::new(|| { - TransactionKernel::assembler() - .assemble_library([NamedSource::new("mock::util", MOCK_UTIL_LIBRARY_CODE)]) - .expect("mock util library should be valid") -}); - -/// Returns the mock test [`Library`] under the `mock::util` namespace. -/// -/// This provides convenient wrappers for testing purposes. 
-pub fn mock_util_library() -> Library { - MOCK_UTIL_LIBRARY.clone() -} diff --git a/crates/miden-protocol/src/testing/mod.rs b/crates/miden-protocol/src/testing/mod.rs index 9f518f7e5a..80dda62a68 100644 --- a/crates/miden-protocol/src/testing/mod.rs +++ b/crates/miden-protocol/src/testing/mod.rs @@ -7,11 +7,11 @@ pub mod block; pub mod block_note_tree; pub mod component_metadata; pub mod constants; -pub mod mock_util_lib; pub mod noop_auth_component; pub mod note; pub mod partial_blockchain; pub mod random_secret_key; pub mod slot_name; pub mod storage; +pub mod storage_map_key; pub mod tx; diff --git a/crates/miden-protocol/src/testing/noop_auth_component.rs b/crates/miden-protocol/src/testing/noop_auth_component.rs index ff88e55a8c..5a7880e7f8 100644 --- a/crates/miden-protocol/src/testing/noop_auth_component.rs +++ b/crates/miden-protocol/src/testing/noop_auth_component.rs @@ -1,5 +1,5 @@ -use crate::account::AccountComponent; use crate::account::component::AccountComponentMetadata; +use crate::account::{AccountComponent, AccountType}; use crate::assembly::{Assembler, Library}; use crate::utils::sync::LazyLock; @@ -7,6 +7,7 @@ use crate::utils::sync::LazyLock; // ================================================================================================ const NOOP_AUTH_CODE: &str = " + @auth_script pub proc auth_noop push.0 drop end @@ -25,9 +26,9 @@ pub struct NoopAuthComponent; impl From for AccountComponent { fn from(_: NoopAuthComponent) -> Self { - let metadata = AccountComponentMetadata::new("miden::testing::noop_auth") - .with_description("No-op auth component for testing") - .with_supports_all_types(); + let metadata = + AccountComponentMetadata::new("miden::testing::noop_auth", AccountType::all()) + .with_description("No-op auth component for testing"); AccountComponent::new(NOOP_AUTH_LIBRARY.clone(), vec![], metadata) .expect("component should be valid") diff --git a/crates/miden-protocol/src/testing/storage.rs 
b/crates/miden-protocol/src/testing/storage.rs index c4c0fc47cb..bf32740a72 100644 --- a/crates/miden-protocol/src/testing/storage.rs +++ b/crates/miden-protocol/src/testing/storage.rs @@ -1,4 +1,3 @@ -use alloc::string::{String, ToString}; use alloc::vec::Vec; use miden_core::{Felt, Word}; @@ -8,11 +7,11 @@ use crate::account::{ AccountStorageDelta, StorageMap, StorageMapDelta, + StorageMapKey, StorageSlot, StorageSlotDelta, StorageSlotName, }; -use crate::note::NoteAssets; use crate::utils::sync::LazyLock; // ACCOUNT STORAGE DELTA @@ -132,19 +131,9 @@ impl AccountStorage { } pub fn mock_map() -> StorageMap { - StorageMap::with_entries(STORAGE_LEAVES_2).unwrap() + StorageMap::with_entries( + STORAGE_LEAVES_2.map(|(key, value)| (StorageMapKey::from_raw(key), value)), + ) + .unwrap() } } - -// UTILITIES -// -------------------------------------------------------------------------------------------- - -/// Returns a list of strings, one for each note asset. -pub fn prepare_assets(note_assets: &NoteAssets) -> Vec { - let mut assets = Vec::new(); - for &asset in note_assets.iter() { - let asset_word = Word::from(asset); - assets.push(asset_word.to_string()); - } - assets -} diff --git a/crates/miden-protocol/src/testing/storage_map_key.rs b/crates/miden-protocol/src/testing/storage_map_key.rs new file mode 100644 index 0000000000..36da9f1e93 --- /dev/null +++ b/crates/miden-protocol/src/testing/storage_map_key.rs @@ -0,0 +1,9 @@ +use crate::Word; +use crate::account::StorageMapKey; + +impl StorageMapKey { + /// Creates a [`StorageMapKey`] from an array of u32s for testing purposes. 
+ pub fn from_array(array: [u32; 4]) -> Self { + Self::from_raw(Word::from(array)) + } +} diff --git a/crates/miden-protocol/src/transaction/executed_tx.rs b/crates/miden-protocol/src/transaction/executed_tx.rs index 4892709e59..19613a48e8 100644 --- a/crates/miden-protocol/src/transaction/executed_tx.rs +++ b/crates/miden-protocol/src/transaction/executed_tx.rs @@ -8,7 +8,7 @@ use super::{ InputNote, InputNotes, NoteId, - OutputNotes, + RawOutputNotes, TransactionArgs, TransactionId, TransactionOutputs, @@ -71,6 +71,7 @@ impl ExecutedTransaction { tx_outputs.account.to_commitment(), tx_inputs.input_notes().commitment(), tx_outputs.output_notes.commitment(), + tx_outputs.fee, ); Self { @@ -111,7 +112,7 @@ impl ExecutedTransaction { } /// Returns the notes created in this transaction. - pub fn output_notes(&self) -> &OutputNotes { + pub fn output_notes(&self) -> &RawOutputNotes { &self.tx_outputs.output_notes } diff --git a/crates/miden-protocol/src/transaction/inputs/account.rs b/crates/miden-protocol/src/transaction/inputs/account.rs index 859d7ca676..9f06662d8a 100644 --- a/crates/miden-protocol/src/transaction/inputs/account.rs +++ b/crates/miden-protocol/src/transaction/inputs/account.rs @@ -3,7 +3,13 @@ use crate::account::{AccountCode, AccountId, PartialAccount, PartialStorage}; use crate::asset::PartialVault; use crate::block::account_tree::AccountWitness; use crate::crypto::merkle::smt::{SmtProof, SmtProofError}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // ACCOUNT INPUTS // ================================================================================================ @@ -97,15 +103,15 @@ mod tests { use alloc::vec::Vec; use miden_core::Felt; - use miden_core::utils::{Deserializable, Serializable}; use miden_crypto::merkle::SparseMerklePath; - use miden_processor::SMT_DEPTH; use 
crate::account::{Account, AccountCode, AccountId, AccountStorage, PartialAccount}; use crate::asset::AssetVault; use crate::block::account_tree::AccountWitness; + use crate::crypto::merkle::smt::SMT_DEPTH; use crate::testing::account_id::ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE; use crate::transaction::AccountInputs; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn serde_roundtrip() { diff --git a/crates/miden-protocol/src/transaction/inputs/mod.rs b/crates/miden-protocol/src/transaction/inputs/mod.rs index 197e35be15..c8e8d75777 100644 --- a/crates/miden-protocol/src/transaction/inputs/mod.rs +++ b/crates/miden-protocol/src/transaction/inputs/mod.rs @@ -1,9 +1,9 @@ use alloc::collections::{BTreeMap, BTreeSet}; +use alloc::sync::Arc; use alloc::vec::Vec; use core::fmt::Debug; -use miden_core::utils::{Deserializable, Serializable}; -use miden_crypto::merkle::smt::{LeafIndex, SmtLeaf, SmtProof}; +use miden_crypto::merkle::smt::{SmtLeaf, SmtProof}; use miden_crypto::merkle::{MerkleError, NodeIndex}; use super::PartialBlockchain; @@ -14,18 +14,25 @@ use crate::account::{ AccountStorageHeader, PartialAccount, PartialStorage, - StorageMap, + StorageMapKey, StorageMapWitness, StorageSlotId, StorageSlotName, }; use crate::asset::{Asset, AssetVaultKey, AssetWitness, PartialVault}; -use crate::block::account_tree::{AccountWitness, account_id_to_smt_index}; +use crate::block::account_tree::{AccountIdKey, AccountWitness}; use crate::block::{BlockHeader, BlockNumber}; use crate::crypto::merkle::SparseMerklePath; use crate::errors::{TransactionInputError, TransactionInputsExtractionError}; use crate::note::{Note, NoteInclusionProof}; -use crate::transaction::{TransactionAdviceInputs, TransactionArgs, TransactionScript}; +use crate::transaction::{TransactionArgs, TransactionScript}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Felt, Word}; #[cfg(test)] @@ -35,9 +42,10 
@@ mod account; pub use account::AccountInputs; mod notes; -use miden_processor::{AdviceInputs, SMT_DEPTH}; pub use notes::{InputNote, InputNotes, ToInputNoteCommitments}; +use crate::vm::AdviceInputs; + // TRANSACTION INPUTS // ================================================================================================ @@ -117,9 +125,10 @@ impl TransactionInputs { for witness in witnesses { self.advice_inputs.store.extend(witness.authenticated_nodes()); let smt_proof = SmtProof::from(witness); - self.advice_inputs - .map - .extend([(smt_proof.leaf().hash(), smt_proof.leaf().to_elements())]); + self.advice_inputs.map.extend([( + smt_proof.leaf().hash(), + smt_proof.leaf().to_elements().collect::>(), + )]); } self @@ -236,10 +245,10 @@ impl TransactionInputs { pub fn read_storage_map_witness( &self, map_root: Word, - map_key: Word, + map_key: StorageMapKey, ) -> Result { // Convert map key into the index at which the key-value pair for this key is stored - let leaf_index = StorageMap::map_key_to_leaf_index(map_key); + let leaf_index = map_key.hash().to_leaf_index(); // Construct sparse Merkle path. let merkle_path = self.advice_inputs.store.get_path(map_root, leaf_index.into())?; @@ -252,7 +261,7 @@ impl TransactionInputs { .map .get(&merkle_node) .ok_or(TransactionInputsExtractionError::MissingVaultRoot)?; - let smt_leaf = smt_leaf_from_elements(smt_leaf_elements, leaf_index)?; + let smt_leaf = SmtLeaf::try_from_elements(smt_leaf_elements, leaf_index)?; // Construct SMT proof and witness. let smt_proof = SmtProof::new(sparse_path, smt_leaf)?; @@ -287,7 +296,7 @@ impl TransactionInputs { .map .get(&merkle_node) .ok_or(TransactionInputsExtractionError::MissingVaultRoot)?; - let smt_leaf = smt_leaf_from_elements(smt_leaf_elements, smt_index)?; + let smt_leaf = SmtLeaf::try_from_elements(smt_leaf_elements, smt_index)?; // Construct SMT proof and witness. 
let smt_proof = SmtProof::new(sparse_path, smt_leaf)?; @@ -342,25 +351,25 @@ impl TransactionInputs { .map .get(&merkle_node) .ok_or(TransactionInputsExtractionError::MissingVaultRoot)?; - let smt_leaf = smt_leaf_from_elements(smt_leaf_elements, smt_index)?; + let smt_leaf = SmtLeaf::try_from_elements(smt_leaf_elements, smt_index)?; // Find the asset in the SMT leaf let asset = smt_leaf .entries() .iter() - .find(|(key, _value)| key == asset_key.as_word()) - .map(|(_key, value)| Asset::try_from(value)) + .find(|(key, _value)| key == &asset_key.to_word()) + .map(|(_key, value)| Asset::from_key_value(asset_key, *value)) .transpose()?; Ok(asset) } - /// Reads AccountInputs for a foreign account from the advice inputs. + /// Reads `AccountInputs` for a foreign account from the advice inputs. /// - /// This function reverses the process of [`TransactionAdviceInputs::add_foreign_accounts`] by: + /// This function reverses the process of `TransactionAdviceInputs::add_foreign_accounts` by: /// 1. Reading the account header from the advice map using the account_id_key. - /// 2. Building a PartialAccount from the header and foreign account code. - /// 3. Creating an AccountWitness. + /// 2. Building a `PartialAccount` from the header and foreign account code. + /// 3. Creating an `AccountWitness`. pub fn read_foreign_account_inputs( &self, account_id: AccountId, @@ -370,11 +379,11 @@ impl TransactionInputs { } // Read the account header elements from the advice map. - let account_id_key = TransactionAdviceInputs::account_id_map_key(account_id); + let account_id_key = AccountIdKey::from(account_id); let header_elements = self .advice_inputs .map - .get(&account_id_key) + .get(&account_id_key.as_word()) .ok_or(TransactionInputsExtractionError::ForeignAccountNotFound(account_id))?; // Parse the header from elements. @@ -440,7 +449,7 @@ impl TransactionInputs { ) -> Result { // Get the account tree root from the block header. 
let account_tree_root = self.block_header.account_root(); - let leaf_index: NodeIndex = account_id_to_smt_index(header.id()).into(); + let leaf_index = AccountIdKey::from(header.id()).to_leaf_index().into(); // Get the Merkle path from the merkle store. let merkle_path = self.advice_inputs.store.get_path(account_tree_root, leaf_index)?; @@ -487,7 +496,7 @@ impl TransactionInputs { // ================================================================================================ impl Serializable for TransactionInputs { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { self.account.write_into(target); self.block_header.write_into(target); self.blockchain.write_into(target); @@ -500,9 +509,7 @@ impl Serializable for TransactionInputs { } impl Deserializable for TransactionInputs { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let account = PartialAccount::read_from(source)?; let block_header = BlockHeader::read_from(source)?; let blockchain = PartialBlockchain::read_from(source)?; @@ -529,58 +536,6 @@ impl Deserializable for TransactionInputs { // HELPER FUNCTIONS // ================================================================================================ -// TODO(sergerad): Move this fn to crypto SmtLeaf::try_from_elements. -pub fn smt_leaf_from_elements( - elements: &[Felt], - leaf_index: LeafIndex, -) -> Result { - use miden_crypto::merkle::smt::SmtLeaf; - - // Based on the miden-crypto SMT leaf serialization format. - - if elements.is_empty() { - return Ok(SmtLeaf::new_empty(leaf_index)); - } - - // Elements should be organized into a contiguous array of K/V Words (4 Felts each). - if !elements.len().is_multiple_of(8) { - return Err(TransactionInputsExtractionError::LeafConversionError( - "invalid SMT leaf format: elements length must be divisible by 8".into(), - )); - } - - let num_entries = elements.len() / 8; - - if num_entries == 1 { - // Single entry. 
- let key = Word::new([elements[0], elements[1], elements[2], elements[3]]); - let value = Word::new([elements[4], elements[5], elements[6], elements[7]]); - Ok(SmtLeaf::new_single(key, value)) - } else { - // Multiple entries. - let mut entries = Vec::with_capacity(num_entries); - // Read k/v pairs from each entry. - for i in 0..num_entries { - let base_idx = i * 8; - let key = Word::new([ - elements[base_idx], - elements[base_idx + 1], - elements[base_idx + 2], - elements[base_idx + 3], - ]); - let value = Word::new([ - elements[base_idx + 4], - elements[base_idx + 5], - elements[base_idx + 6], - elements[base_idx + 7], - ]); - entries.push((key, value)); - } - let leaf = SmtLeaf::new_multiple(entries)?; - Ok(leaf) - } -} - /// Validates whether the provided note belongs to the note tree of the specified block. fn validate_is_in_block( note: &Note, diff --git a/crates/miden-protocol/src/transaction/inputs/notes.rs b/crates/miden-protocol/src/transaction/inputs/notes.rs index 638ae56eba..b48fe8741b 100644 --- a/crates/miden-protocol/src/transaction/inputs/notes.rs +++ b/crates/miden-protocol/src/transaction/inputs/notes.rs @@ -204,7 +204,7 @@ impl Serializable for InputNotes { impl Deserializable for InputNotes { fn read_from(source: &mut R) -> Result { let num_notes = source.read_u16()?; - let notes = source.read_many::(num_notes.into())?; + let notes = source.read_many_iter(num_notes.into())?.collect::, _>>()?; Self::new(notes).map_err(|err| DeserializationError::InvalidValue(format!("{err}"))) } } diff --git a/crates/miden-protocol/src/transaction/inputs/tests.rs b/crates/miden-protocol/src/transaction/inputs/tests.rs index 60b74fd00a..cc2fcee7a2 100644 --- a/crates/miden-protocol/src/transaction/inputs/tests.rs +++ b/crates/miden-protocol/src/transaction/inputs/tests.rs @@ -1,10 +1,9 @@ use alloc::string::ToString; +use alloc::sync::Arc; use alloc::vec; use std::collections::BTreeMap; use std::vec::Vec; -use miden_core::utils::{Deserializable, Serializable}; - 
use crate::account::{ AccountCode, AccountHeader, @@ -17,12 +16,14 @@ use crate::account::{ StorageSlotType, }; use crate::asset::PartialVault; +use crate::block::account_tree::AccountIdKey; use crate::errors::TransactionInputsExtractionError; use crate::testing::account_id::{ ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE, ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2, }; use crate::transaction::TransactionInputs; +use crate::utils::serde::{Deserializable, Serializable}; use crate::{Felt, Word}; #[test] @@ -119,9 +120,10 @@ fn test_read_foreign_account_inputs_with_storage_data() { // Create advice inputs with both account header and storage header. let mut advice_inputs = crate::vm::AdviceInputs::default(); - let account_id_key = - crate::transaction::TransactionAdviceInputs::account_id_map_key(foreign_account_id); - advice_inputs.map.insert(account_id_key, foreign_header.to_elements()); + let account_id_key = AccountIdKey::from(foreign_account_id); + advice_inputs + .map + .insert(account_id_key.as_word(), foreign_header.to_elements().to_vec()); advice_inputs .map .insert(foreign_header.storage_commitment(), foreign_storage_header.to_elements()); @@ -232,10 +234,10 @@ fn test_read_foreign_account_inputs_with_proper_witness() { let mut advice_inputs = crate::vm::AdviceInputs::default(); // Add account header to advice map. - let account_id_key = - crate::transaction::TransactionAdviceInputs::account_id_map_key(foreign_account_id); - advice_inputs.map.insert(account_id_key, foreign_header.to_elements().to_vec()); - + let account_id_key = AccountIdKey::from(foreign_account_id); + advice_inputs + .map + .insert(account_id_key.as_word(), foreign_header.to_elements().to_vec()); // Add storage header to advice map. advice_inputs .map @@ -246,7 +248,9 @@ fn test_read_foreign_account_inputs_with_proper_witness() { // Add the account leaf to the advice map (needed for witness verification). 
let leaf = foreign_witness.leaf(); - advice_inputs.map.insert(leaf.hash(), leaf.to_elements()); + advice_inputs + .map + .insert(leaf.hash(), leaf.to_elements().collect::>()); // Create block header with the account tree root. let block_header = crate::block::BlockHeader::mock(0, None, None, &[], account_tree_root); diff --git a/crates/miden-protocol/src/transaction/kernel/advice_inputs.rs b/crates/miden-protocol/src/transaction/kernel/advice_inputs.rs index 58e27cb649..295a3ef03b 100644 --- a/crates/miden-protocol/src/transaction/kernel/advice_inputs.rs +++ b/crates/miden-protocol/src/transaction/kernel/advice_inputs.rs @@ -1,9 +1,9 @@ use alloc::vec::Vec; -use miden_processor::AdviceMutation; +use miden_processor::advice::AdviceMutation; -use crate::account::{AccountHeader, AccountId, PartialAccount}; -use crate::block::account_tree::{AccountWitness, account_id_to_smt_key}; +use crate::account::{AccountHeader, PartialAccount}; +use crate::block::account_tree::{AccountIdKey, AccountWitness}; use crate::crypto::SequentialCommit; use crate::crypto::merkle::InnerNodeInfo; use crate::note::NoteAttachmentContent; @@ -15,7 +15,7 @@ use crate::transaction::{ TransactionKernel, }; use crate::vm::AdviceInputs; -use crate::{EMPTY_WORD, Felt, FieldElement, Word, ZERO}; +use crate::{EMPTY_WORD, Felt, Word, ZERO}; // TRANSACTION ADVICE INPUTS // ================================================================================================ @@ -56,8 +56,8 @@ impl TransactionAdviceInputs { // If a seed was provided, extend the map appropriately. if let Some(seed) = tx_inputs.account().seed() { // ACCOUNT_ID |-> ACCOUNT_SEED - let account_id_key = Self::account_id_map_key(partial_native_acc.id()); - inputs.add_map_entry(account_id_key, seed.to_vec()); + let account_id_key = AccountIdKey::from(partial_native_acc.id()); + inputs.add_map_entry(account_id_key.as_word(), seed.to_vec()); } // if the account is new, insert the storage map entries into the advice provider. 
@@ -104,14 +104,6 @@ impl TransactionAdviceInputs { // PUBLIC UTILITIES // -------------------------------------------------------------------------------------------- - /// Returns the advice map key where: - /// - the seed for native accounts is stored. - /// - the account header for foreign accounts is stored. - pub fn account_id_map_key(id: AccountId) -> Word { - // The format is equivalent to the SMT key format, so we avoid defining it twice. - account_id_to_smt_key(id) - } - // MUTATORS // -------------------------------------------------------------------------------------------- @@ -131,11 +123,11 @@ impl TransactionAdviceInputs { // for foreign accounts, we need to insert the id to state mapping // NOTE: keep this in sync with the account::load_from_advice procedure - let account_id_key = Self::account_id_map_key(foreign_acc.id()); + let account_id_key = AccountIdKey::from(foreign_acc.id()); let header = AccountHeader::from(foreign_acc.account()); // ACCOUNT_ID |-> [ID_AND_NONCE, VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT] - self.add_map_entry(account_id_key, header.to_elements()); + self.add_map_entry(account_id_key.as_word(), header.to_elements()); } } @@ -178,13 +170,13 @@ impl TransactionAdviceInputs { self.extend_stack(header.validator_key().to_commitment()); self.extend_stack([ header.block_num().into(), - header.version().into(), - header.timestamp().into(), + Felt::from(header.version()), + Felt::from(header.timestamp()), ZERO, ]); self.extend_stack([ ZERO, - header.fee_parameters().verification_base_fee().into(), + Felt::from(header.fee_parameters().verification_base_fee()), header.fee_parameters().native_asset_id().suffix(), header.fee_parameters().native_asset_id().prefix().as_felt(), ]); @@ -282,13 +274,20 @@ impl TransactionAdviceInputs { // populate Merkle store and advice map with nodes info needed to access storage map entries self.extend_merkle_store(account.storage().inner_nodes()); - 
self.extend_map(account.storage().leaves().map(|leaf| (leaf.hash(), leaf.to_elements()))); + self.extend_map( + account + .storage() + .leaves() + .map(|leaf| (leaf.hash(), leaf.to_elements().collect())), + ); // --- account vault ------------------------------------------------------ // populate Merkle store and advice map with nodes info needed to access vault assets self.extend_merkle_store(account.vault().inner_nodes()); - self.extend_map(account.vault().leaves().map(|leaf| (leaf.hash(), leaf.to_elements()))); + self.extend_map( + account.vault().leaves().map(|leaf| (leaf.hash(), leaf.to_elements().collect())), + ); } /// Adds an account witness to the advice inputs. @@ -298,7 +297,7 @@ impl TransactionAdviceInputs { fn add_account_witness(&mut self, witness: &AccountWitness) { // populate advice map with the account's leaf let leaf = witness.leaf(); - self.add_map_entry(leaf.hash(), leaf.to_elements()); + self.add_map_entry(leaf.hash(), leaf.to_elements().collect()); // extend the merkle store and map with account witnesses merkle path self.extend_merkle_store(witness.authenticated_nodes()); @@ -312,13 +311,12 @@ impl TransactionAdviceInputs { /// The advice provider is populated with: /// /// - For each note: - /// - The note's details (serial number, script root, and its input / assets commitment). + /// - The note's details (serial number, script root, and its storage / assets commitment). /// - The note's private arguments. - /// - The note's public metadata. - /// - The note's public inputs data. Prefixed by its length and padded to an even word - /// length. - /// - The note's asset padded. Prefixed by its length and padded to an even word length. - /// - The note's script MAST forest's advice map inputs + /// - The note's public metadata (sender account ID, note type, note tag, attachment kind / + /// scheme and the attachment content). + /// - The note's storage (unpadded). + /// - The note's assets (key and value words). 
/// - For authenticated notes (determined by the `is_authenticated` flag): /// - The note's authentication path against its block's note tree. /// - The block number, sub commitment, note root. @@ -340,7 +338,7 @@ impl TransactionAdviceInputs { // recipient storage self.add_map_entry(recipient.storage().commitment(), recipient.storage().to_elements()); // assets commitments - self.add_map_entry(assets.commitment(), assets.to_padded_assets()); + self.add_map_entry(assets.commitment(), assets.to_elements()); // array attachments if let NoteAttachmentContent::Array(array_attachment) = note.metadata().attachment().content() @@ -357,11 +355,11 @@ impl TransactionAdviceInputs { note_data.extend(*recipient.storage().commitment()); note_data.extend(*assets.commitment()); note_data.extend(*note_arg); - note_data.extend(note.metadata().to_header_word()); note_data.extend(note.metadata().to_attachment_word()); - note_data.push(recipient.storage().num_items().into()); - note_data.push((assets.num_assets() as u32).into()); - note_data.extend(assets.to_padded_assets()); + note_data.extend(note.metadata().to_header_word()); + note_data.push(Felt::from(recipient.storage().num_items())); + note_data.push(Felt::from(assets.num_assets() as u32)); + note_data.extend(assets.to_elements()); // authentication vs unauthenticated match input_note { @@ -385,7 +383,7 @@ impl TransactionAdviceInputs { note_data.push(block_num.into()); note_data.extend(block_header.sub_commitment()); note_data.extend(block_header.note_root()); - note_data.push(proof.location().node_index_in_block().into()); + note_data.push(Felt::from(proof.location().node_index_in_block())); }, InputNote::Unauthenticated { .. 
} => { // push the `is_authenticated` flag diff --git a/crates/miden-protocol/src/transaction/kernel/memory.rs b/crates/miden-protocol/src/transaction/kernel/memory.rs index c23e26349e..95373d8788 100644 --- a/crates/miden-protocol/src/transaction/kernel/memory.rs +++ b/crates/miden-protocol/src/transaction/kernel/memory.rs @@ -21,9 +21,9 @@ pub type StorageSlot = u8; // | Kernel data | 1_600 | 140 | 34 procedures in total, 4 elements each | // | Accounts data | 8_192 | 524_288 | 64 accounts max, 8192 elements each | // | Account delta | 532_480 | 263 | | -// | Input notes | 4_194_304 | 2_162_688 | nullifiers data segment + 1024 input notes | -// | | | | max, 2048 elements each | -// | Output notes | 16_777_216 | 2_097_152 | 1024 output notes max, 2048 elements each | +// | Input notes | 4_194_304 | 3_211_264 | nullifiers data segment (2^16 elements) | +// | | | | + 1024 input notes max, 3072 elements each | +// | Output notes | 16_777_216 | 3_145_728 | 1024 output notes max, 3072 elements each | // | Link Map Memory | 33_554_432 | 33_554_432 | Enough for 2_097_151 key-value pairs | // Relative layout of one account @@ -40,9 +40,9 @@ pub type StorageSlot = u8; // | Padding | 1_056 | 4 | | // | Proc tracking | 1_060 | 256 | 256 procedures max, 1 element each | // | Num storage slots | 1_316 | 4 | | -// | Initial slot info | 1_320 | 1_020 | Only initialized on the native account | -// | Active slot info | 2_340 | 1_020 | 255 slots max, 8 elements each | -// | Padding | 3_360 | 4_832 | | +// | Initial slot info | 1_320 | 2_040 | Only initialized on the native account | +// | Active slot info | 3_360 | 2_040 | 255 slots max, 8 elements each | +// | Padding | 5_400 | 2_792 | | // // Storage slots are laid out as [[0, slot_type, slot_id_suffix, slot_id_prefix], SLOT_VALUE]. @@ -333,7 +333,7 @@ pub const ACCT_STORAGE_SLOT_VALUE_OFFSET: u8 = 4; /// the account data segment. /// /// This section contains the current values of the account storage slots. 
-pub const ACCT_ACTIVE_STORAGE_SLOTS_SECTION_OFFSET: MemoryAddress = 2340; +pub const ACCT_ACTIVE_STORAGE_SLOTS_SECTION_OFFSET: MemoryAddress = 3360; /// The memory address at which the account's active storage slots section begins in the native /// account. @@ -344,7 +344,7 @@ pub const NATIVE_ACCT_STORAGE_SLOTS_SECTION_PTR: MemoryAddress = // ================================================================================================ /// The size of the memory segment allocated to each note. -pub const NOTE_MEM_SIZE: MemoryAddress = 2048; +pub const NOTE_MEM_SIZE: MemoryAddress = 3072; #[allow(clippy::empty_line_after_outer_attr)] #[rustfmt::skip] @@ -358,29 +358,35 @@ pub const NOTE_MEM_SIZE: MemoryAddress = 2048; // │ NUM │ NOTE 0 │ NOTE 1 │ ... │ NOTE n │ PADDING │ NOTE 0 │ NOTE 1 │ ... │ NOTE n │ // │ NOTES │ NULLIFIER │ NULLIFIER │ │ NULLIFIER │ │ DATA │ DATA │ │ DATA │ // ├──────────┼───────────┼───────────┼─────┼────────────────┼─────────┼──────────┼────────┼───────┼────────┤ -// 4_194_304 4_194_308 4_194_312 4_194_304+4(n+1) 4_259_840 +2048 +4096 +2048n +// 4_194_304 4_194_308 4_194_312 4_194_304+4(n+1) 4_259_840 +3072 +6144 +3072n // // Here `n` represents number of input notes. // -// Each nullifier occupies a single word. A data section for each note consists of exactly 2048 +// Each nullifier occupies a single word. A data section for each note consists of exactly 3072 // elements and is laid out like so: // -// ┌──────┬────────┬────────┬─────────┬────────────┬───────────┬──────────┬────────────┬───────┬─────────┬────────┬───────┬─────┬───────┬─────────┬ -// │ NOTE │ SERIAL │ SCRIPT │ STORAGE │ ASSETS | RECIPIENT │ METADATA │ ATTACHMENT │ NOTE │ STORAGE │ NUM │ ASSET │ ... 
│ ASSET │ PADDING │ -// │ ID │ NUM │ ROOT │ COMM │ COMMITMENT | │ HEADER │ │ ARGS │ LENGTH │ ASSETS │ 0 │ │ n │ │ -// ├──────┼────────┼────────┼─────────┼────────────┼───────────┼──────────┼────────────┼───────┼─────────┼────────┼───────┼─────┼───────┼─────────┤ -// 0 4 8 12 16 20 24 28 32 36 40 44 + 4n +// ┌──────┬────────┬────────┬─────────┬────────────┬───────────┬──────────┬────────────┬───────┬ +// │ NOTE │ SERIAL │ SCRIPT │ STORAGE │ ASSETS │ RECIPIENT │ METADATA │ ATTACHMENT │ NOTE │ +// │ ID │ NUM │ ROOT │ COMM │ COMMITMENT │ │ HEADER │ │ ARGS │ +// ├──────┼────────┼────────┼─────────┼────────────┼───────────┼──────────┼────────────┼───────┼ +// 0 4 8 12 16 20 24 28 32 +// +// ┬─────────┬────────┬───────┬─────────┬─────┬────────┬─────────┬─────────┐ +// │ STORAGE │ NUM │ ASSET │ ASSET │ ... │ ASSET │ ASSET │ PADDING │ +// │ LENGTH │ ASSETS │ KEY 0 │ VALUE 0 │ │ KEY n │ VALUE n │ │ +// ┼─────────┼────────┼───────┼─────────┼─────┼────────┼─────────┼─────────┘ +// 36 40 44 48 44 + 8n 48 + 8n // // - NUM_STORAGE_ITEMS is encoded as [num_storage_items, 0, 0, 0]. // - NUM_ASSETS is encoded as [num_assets, 0, 0, 0]. // - STORAGE_COMMITMENT is the key to look up note storage in the advice map. // - ASSETS_COMMITMENT is the key to look up note assets in the advice map. // -// Notice that note storage values are not loaded to the memory, only their length. In order to obtain +// Notice that note storage item are not loaded to the memory, only their length. In order to obtain // the storage values the advice map should be used: they are stored there as // `STORAGE_COMMITMENT -> STORAGE`. // -// As opposed to the asset values, storage values are never used in kernel memory, so their presence +// As opposed to the asset values, storage items are never used in kernel memory, so their presence // there is unnecessary. /// The memory address at which the input note section begins. 
@@ -419,27 +425,29 @@ pub const INPUT_NOTE_ASSETS_OFFSET: MemoryOffset = 44; // ┌─────────────┬─────────────┬───────────────┬─────────────┐ // │ NOTE 0 DATA │ NOTE 1 DATA │ ... │ NOTE n DATA │ // └─────────────┴─────────────┴───────────────┴─────────────┘ -// 16_777_216 +2048 +4096 +2048n +// 16_777_216 +3072 +6144 +3072n // // The total number of output notes for a transaction is stored in the bookkeeping section of the // memory. Data section of each note is laid out like so: // -// ┌──────┬──────────┬────────────┬───────────┬────────────┬────────────────┬─────────┬─────┬─────────┬─────────┐ -// │ NOTE │ METADATA │ METADATA │ RECIPIENT │ ASSETS │ NUM ASSETS │ ASSET 0 │ ... │ ASSET n │ PADDING │ -// | ID | HEADER | ATTACHMENT | | COMMITMENT | AND DIRTY FLAG | | | | | -// ├──────┼──────────┼────────────┼───────────┼────────────┼────────────────┼─────────┼─────┼─────────┼─────────┤ -// 0 1 2 3 4 5 6 6 + n +// ┌──────┬──────────┬────────────┬───────────┬────────────┬────────┬───────┬ +// │ NOTE │ METADATA │ METADATA │ RECIPIENT │ ASSETS │ NUM │ DIRTY │ +// │ ID │ HEADER │ ATTACHMENT │ │ COMMITMENT │ ASSETS │ FLAG │ +// ├──────┼──────────┼────────────┼───────────┼────────────┼────────┼───────┼ +// 0 4 8 12 16 20 21 // -// The NUM_ASSETS_AND_DIRTY_FLAG word has the following layout: -// `[num_assets, assets_commitment_dirty_flag, 0, 0]`, where: -// - `num_assets` is the number of assets in this output note. -// - `assets_commitment_dirty_flag` is the binary flag which specifies whether the assets commitment -// stored in this note is outdated. It holds 1 if some changes were made to the note assets since -// the last re-computation, and 0 otherwise. +// ┬───────┬─────────┬─────┬────────┬─────────┬─────────┐ +// │ ASSET │ ASSET │ ... 
│ ASSET │ ASSET │ PADDING │ +// │ KEY 0 │ VALUE 0 │ │ KEY n │ VALUE n │ │ +// ┼───────┼─────────┼─────┼────────┼─────────┼─────────┘ +// 24 28 24 + 8n 28 + 8n // -// Dirty flag is set to 0 after every recomputation of the assets commitment in the -// `kernel::note::compute_output_note_assets_commitment` procedure. It is set to 1 in the -// `kernel::output_note::add_asset` procedure after any change was made to the assets data. +// The DIRTY_FLAG is the binary flag which specifies whether the assets commitment stored in this +// note is outdated. It holds 1 if some changes were made to the note assets since the last +// re-computation, and 0 otherwise. +// It is set to 0 after every recomputation of the assets commitment in the +// `$kernel::note::compute_output_note_assets_commitment` procedure. It is set to 1 in the +// `$kernel::output_note::add_asset` procedure after any change was made to the assets data. /// The memory address at which the output notes section begins. pub const OUTPUT_NOTE_SECTION_OFFSET: MemoryOffset = 16_777_216; @@ -454,6 +462,17 @@ pub const OUTPUT_NOTE_NUM_ASSETS_OFFSET: MemoryOffset = 20; pub const OUTPUT_NOTE_DIRTY_FLAG_OFFSET: MemoryOffset = 21; pub const OUTPUT_NOTE_ASSETS_OFFSET: MemoryOffset = 24; +// ASSETS +// ------------------------------------------------------------------------------------------------ + +/// The size of an asset's memory representation. +#[cfg(any(feature = "testing", test))] +pub const ASSET_SIZE: MemoryOffset = 8; + +/// The offset of the asset value in an asset's memory representation. 
+#[cfg(any(feature = "testing", test))] +pub const ASSET_VALUE_OFFSET: MemoryOffset = 4; + // LINK MAP // ------------------------------------------------------------------------------------------------ diff --git a/crates/miden-protocol/src/transaction/kernel/mod.rs b/crates/miden-protocol/src/transaction/kernel/mod.rs index 334376165a..2518650424 100644 --- a/crates/miden-protocol/src/transaction/kernel/mod.rs +++ b/crates/miden-protocol/src/transaction/kernel/mod.rs @@ -1,4 +1,3 @@ -use alloc::string::ToString; use alloc::sync::Arc; use alloc::vec::Vec; @@ -14,13 +13,15 @@ use crate::block::BlockNumber; use crate::crypto::SequentialCommit; use crate::errors::TransactionOutputError; use crate::protocol::ProtocolLib; -use crate::transaction::{OutputNote, OutputNotes, TransactionInputs, TransactionOutputs}; +use crate::transaction::{RawOutputNote, RawOutputNotes, TransactionInputs, TransactionOutputs}; use crate::utils::serde::Deserializable; use crate::utils::sync::LazyLock; use crate::vm::{AdviceInputs, Program, ProgramInfo, StackInputs, StackOutputs}; use crate::{Felt, Hasher, Word}; -mod procedures; +mod procedures { + include!(concat!(env!("OUT_DIR"), "/procedures.rs")); +} pub mod memory; @@ -160,7 +161,7 @@ impl TransactionKernel { /// BLOCK_COMMITMENT, /// INITIAL_ACCOUNT_COMMITMENT, /// INPUT_NOTES_COMMITMENT, - /// account_id_prefix, account_id_suffix, block_num + /// account_id_suffix, account_id_prefix, block_num /// ] /// ``` /// @@ -181,15 +182,14 @@ impl TransactionKernel { ) -> StackInputs { // Note: Must be kept in sync with the transaction's kernel prepare_transaction procedure let mut inputs: Vec = Vec::with_capacity(14); - inputs.push(Felt::from(block_num)); + inputs.extend_from_slice(block_commitment.as_elements()); + inputs.extend_from_slice(initial_account_commitment.as_elements()); + inputs.extend(input_notes_commitment); inputs.push(account_id.suffix()); inputs.push(account_id.prefix().as_felt()); - inputs.extend(input_notes_commitment); - 
inputs.extend_from_slice(initial_account_commitment.as_elements()); - inputs.extend_from_slice(block_commitment.as_elements()); - StackInputs::new(inputs) - .map_err(|e| e.to_string()) - .expect("Invalid stack input") + inputs.push(Felt::from(block_num)); + + StackInputs::new(&inputs).expect("number of stack inputs should be <= 16") } /// Builds the stack for expected transaction execution outputs. @@ -199,8 +199,7 @@ impl TransactionKernel { /// [ /// OUTPUT_NOTES_COMMITMENT, /// ACCOUNT_UPDATE_COMMITMENT, - /// FEE_ASSET, - /// expiration_block_num, + /// native_asset_id_suffix, native_asset_id_prefix, fee_amount, expiration_block_num /// ] /// ``` /// @@ -219,15 +218,16 @@ impl TransactionKernel { ) -> StackOutputs { let account_update_commitment = Hasher::merge(&[final_account_commitment, account_delta_commitment]); - let mut outputs: Vec = Vec::with_capacity(9); - outputs.push(Felt::from(expiration_block_num)); - outputs.extend(Word::from(fee)); - outputs.extend(account_update_commitment); + + let mut outputs: Vec = Vec::with_capacity(12); outputs.extend(output_notes_commitment); - outputs.reverse(); - StackOutputs::new(outputs) - .map_err(|e| e.to_string()) - .expect("Invalid stack output") + outputs.extend(account_update_commitment); + outputs.push(fee.faucet_id().suffix()); + outputs.push(fee.faucet_id().prefix().as_felt()); + outputs.push(Felt::try_from(fee.amount().inner()).expect("amount should fit into felt")); + outputs.push(Felt::from(expiration_block_num)); + + StackOutputs::new(&outputs).expect("number of stack inputs should be <= 16") } /// Extracts transaction output data from the provided stack outputs. 
@@ -261,22 +261,28 @@ impl TransactionKernel { stack: &StackOutputs, // FIXME TODO add an extension trait for this one ) -> Result<(Word, Word, FungibleAsset, BlockNumber), TransactionOutputError> { let output_notes_commitment = stack - .get_stack_word_be(TransactionOutputs::OUTPUT_NOTES_COMMITMENT_WORD_IDX * 4) + .get_word(TransactionOutputs::OUTPUT_NOTES_COMMITMENT_WORD_IDX) .expect("output_notes_commitment (first word) missing"); let account_update_commitment = stack - .get_stack_word_be(TransactionOutputs::ACCOUNT_UPDATE_COMMITMENT_WORD_IDX * 4) + .get_word(TransactionOutputs::ACCOUNT_UPDATE_COMMITMENT_WORD_IDX) .expect("account_update_commitment (second word) missing"); - let fee = stack - .get_stack_word_be(TransactionOutputs::FEE_ASSET_WORD_IDX * 4) - .expect("fee_asset (third word) missing"); + let native_asset_id_prefix = stack + .get_element(TransactionOutputs::NATIVE_ASSET_ID_PREFIX_ELEMENT_IDX) + .expect("native_asset_id_prefix missing"); + let native_asset_id_suffix = stack + .get_element(TransactionOutputs::NATIVE_ASSET_ID_SUFFIX_ELEMENT_IDX) + .expect("native_asset_id_suffix missing"); + let fee_amount = stack + .get_element(TransactionOutputs::FEE_AMOUNT_ELEMENT_IDX) + .expect("fee_amount missing"); let expiration_block_num = stack - .get_stack_item(TransactionOutputs::EXPIRATION_BLOCK_ELEMENT_IDX) - .expect("tx_expiration_block_num (element on index 12) missing"); + .get_element(TransactionOutputs::EXPIRATION_BLOCK_ELEMENT_IDX) + .expect("tx_expiration_block_num missing"); - let expiration_block_num = u32::try_from(expiration_block_num.as_int()) + let expiration_block_num = u32::try_from(expiration_block_num.as_canonical_u64()) .map_err(|_| { TransactionOutputError::OutputStackInvalid( "expiration block number should be smaller than u32::MAX".into(), @@ -286,7 +292,7 @@ impl TransactionKernel { // Make sure that indices 13, 14 and 15 are zeroes (i.e. the fourth word without the // expiration block number). 
- if stack.get_stack_word_be(12).expect("fourth word missing").as_elements()[..3] + if stack.get_word(12).expect("fourth word missing").as_elements()[..3] != Word::empty().as_elements()[..3] { return Err(TransactionOutputError::OutputStackInvalid( @@ -294,7 +300,10 @@ impl TransactionKernel { )); } - let fee = FungibleAsset::try_from(fee) + let native_asset_id = + AccountId::try_from_elements(native_asset_id_suffix, native_asset_id_prefix) + .expect("native asset ID should be validated by the tx kernel"); + let fee = FungibleAsset::new(native_asset_id, fee_amount.as_canonical_u64()) .map_err(TransactionOutputError::FeeAssetNotFungibleAsset)?; Ok((output_notes_commitment, account_update_commitment, fee, expiration_block_num)) @@ -332,7 +341,7 @@ impl TransactionKernel { pub fn from_transaction_parts( stack: &StackOutputs, advice_inputs: &AdviceInputs, - output_notes: Vec, + output_notes: Vec, ) -> Result { let (output_notes_commitment, account_update_commitment, fee, expiration_block_num) = Self::parse_output_stack(stack)?; @@ -350,7 +359,7 @@ impl TransactionKernel { .map_err(TransactionOutputError::FinalAccountHeaderParseFailure)?; // validate output notes - let output_notes = OutputNotes::new(output_notes)?; + let output_notes = RawOutputNotes::new(output_notes)?; if output_notes_commitment != output_notes.commitment() { return Err(TransactionOutputError::OutputNotesCommitmentInconsistent { actual: output_notes.commitment(), diff --git a/crates/miden-protocol/src/transaction/kernel/procedures.rs b/crates/miden-protocol/src/transaction/kernel/procedures.rs deleted file mode 100644 index 5a1409ec8f..0000000000 --- a/crates/miden-protocol/src/transaction/kernel/procedures.rs +++ /dev/null @@ -1,110 +0,0 @@ -// This file is generated by build.rs, do not modify - -use crate::{Word, word}; - -// KERNEL PROCEDURES -// ================================================================================================ - -/// Hashes of all dynamically executed kernel 
procedures. -pub const KERNEL_PROCEDURES: [Word; 50] = [ - // account_get_initial_commitment - word!("0x428fc7b1fda493bfda62f4f775dea0b5d2f67fced462b677f3e2cbd801eb6c24"), - // account_compute_commitment - word!("0xeae60b8eea84883c07323f92dbbcc9c6d579056968ae3f5bdc1f23028494056b"), - // account_get_id - word!("0x2dda3cf687eabd92b3d00a8a47afd10eacc5cc97e533999629992c7529a1052a"), - // account_get_nonce - word!("0x510bc00835f2ba7865acbc1153496b14dec3bff0d62d210f833e464e0ceb57b3"), - // account_incr_nonce - word!("0xc5e167d1d9e52a1bb0f8309b99b0867137d86f45bde872c1bee753b197cf6e2b"), - // account_get_code_commitment - word!("0xfad445052d9bc370550060fa2e5baad7b2cec350306c580cad8c94f57f4fc936"), - // account_get_initial_storage_commitment - word!("0xcf3f199f8b679d6a18fa3c581d19c9644d86a4ece01ce0a4b1841be584237586"), - // account_compute_storage_commitment - word!("0x398d217aa18f0a3ce52198ca30d8b97edae88f0f247d4cbfa7073dc01dcfd1fa"), - // account_get_item - word!("0x6c4a309164216cd0ade284be5dafdf908c843415ea4e3c8076da222a35242ddf"), - // account_get_initial_item - word!("0x608c220fad2506e8c8cb91e3616e5329734b8084dc24cd0e29bf2b2afc3b459c"), - // account_set_item - word!("0xea6399e82bded79f07e787945a9a741097b7adb1c90b13fbf6edcdd5b8223f99"), - // account_get_map_item - word!("0x18db5479e15631e99b5cdd9e862ad292b02882e3b980f20766e50be455b3d265"), - // account_get_initial_map_item - word!("0xb51370bdd09abf4fd78fe227a8d69df6e70095b8146a40bf686a0b6184e73086"), - // account_set_map_item - word!("0x811e8cea8f54547838af32da86001a143c269f20b50dcad63e021eef5caf87d2"), - // account_get_initial_vault_root - word!("0x02649bba877c3f2485e19b13124212f308c496164daf5b67c5d6cac5cc1efa93"), - // account_get_vault_root - word!("0x06d2ed6e186f5a6ed429393c125d3299b8b8c9d374d4a70c07223270a226e4ee"), - // account_add_asset - word!("0x5c524e85edb7169a227ca22a0bd3b7358db47759bba0fb24848364ff02bfba09"), - // account_remove_asset - 
word!("0xd8a9ec09086027ee1c3123a484fe7b26c71ae8c67228556e84fefcfa4baad703"), - // account_get_asset - word!("0xfe9a9b1c47b2c4a26eba5e5c8f768c326b91eaeb4035d4543399bb47bcb17969"), - // account_get_initial_asset - word!("0x198e7beb67775fdf25509dc1c58bf8dfbd3b420dbe1dd614e4875623d2f4733f"), - // account_compute_delta_commitment - word!("0x81417260f41fb7d1f0e9b024dfff882d403f38bca98319827d108047fc3b3115"), - // account_get_num_procedures - word!("0xfd0ba549c2eb309268f9285ea2f689c50b70f625f79767ecbfc2827e61c76b01"), - // account_get_procedure_root - word!("0x46eca9419660d7b453da7926c22156d2e7e56f8d6855dad3044c359eb7a56eca"), - // account_was_procedure_called - word!("0xcdd2f93e20df0220a34f123fd8b99edbef4d8534db6f88567f1cad5507149161"), - // account_has_procedure - word!("0xc6d3f1dcbe531f4aeb6b6a7b2cebd9fbc460f473ac3d25a9a6a6febfbb572777"), - // faucet_mint_asset - word!("0xf6fbe441831ce43dfcd75e7295db2c10b9b6a6dc56b2b3828b2681d5005374bc"), - // faucet_burn_asset - word!("0xd2740b1a2177a038f5d48fc0642194b09cdc5d214db683bc0c327ef0f309ec13"), - // input_note_get_metadata - word!("0x447b342e38855a9402cde0ea52ecb5e4c1fe542b535a5364cb5caa8e94c82442"), - // input_note_get_assets_info - word!("0xe0817bed99fb61180e705b2c9e5ca8c8f0c62864953247a56acbc65b7d58c2d5"), - // input_note_get_script_root - word!("0x527036257e58c3a84cf0aa170fb3f219a4553db17d269279355ad164a2b90ac5"), - // input_note_get_storage_info - word!("0xb7f45ec34f7708355551dcf1f82c9c40e2c19252f8d5c98dcf9ef1aa0a3eb878"), - // input_note_get_serial_number - word!("0x25815e02b7976d8e5c297dde60d372cc142c81f702f424ac0920190528c547ee"), - // input_note_get_recipient - word!("0xd3c255177f9243bb1a523a87615bbe76dd5a3605fcae87eb9d3a626d4ecce33c"), - // output_note_create - word!("0x46617cab4fe225ec5ebcb00324257183c9b00e9748bbda51b768a8ffbed820b6"), - // output_note_get_metadata - word!("0xd089109a5468cda774bdd6ef2347a2bc0cbe599e4cf324bfd0d830ddcf541b9b"), - // output_note_get_assets_info - 
word!("0xa4e1135421226f499a2c9cad5dfeb72c8a0aaf75a6269831d72d1f6ffa17f98b"), - // output_note_get_recipient - word!("0xac423c9a37cd0dbe9f9ded8ce7cc3289d6632d9cba858aa9ac15706278dec0db"), - // output_note_add_asset - word!("0x816d7dfeb746dae0c6c1aa7056a14c132552f223c71d8f1626892bc84b3947b5"), - // output_note_set_attachment - word!("0x3fa10be4130e1d87e18c3707f0abf8349c42b141f41ac4be360e2328ccc93137"), - // tx_get_num_input_notes - word!("0xfcc186d4b65c584f3126dda1460b01eef977efd76f9e36f972554af28e33c685"), - // tx_get_input_notes_commitment - word!("0xc3a334434daa7d4ea15e1b2cb1a8000ad757f9348560a7246336662b77b0d89a"), - // tx_get_num_output_notes - word!("0x6a55f7cc1d20a75c36d474a59476c32faded0f5ff327cf942a8bc0fc0bb576e4"), - // tx_get_output_notes_commitment - word!("0x35149a69681842b93abe0cac01db65f901f634cbbc3d3366404ffc1bdae77d41"), - // tx_get_block_commitment - word!("0xe474b491a64d222397fcf83ee5db7b048061988e5e83ce99b91bae6fd75a3522"), - // tx_get_block_number - word!("0x297797dff54b8108dd2df254b95d43895d3f917ab10399efc62adaf861c905ae"), - // tx_get_block_timestamp - word!("0x7903185b847517debb6c2072364e3e757b99ee623e97c2bd0a4661316c5c5418"), - // tx_prepare_fpi - word!("0x9b19a239c7bf6cccacc15b4739678c6e76d52efcb5bd9a0b885f8c425cf750f9"), - // tx_exec_foreign_proc - word!("0x9ae36839e5702d1689075f7dccb6bf7eeda435a763e3e60b0df95023d3f83ef2"), - // tx_get_expiration_delta - word!("0x1e9cb08be26df46db852bf39c1f40d2b7fca686e23b058e2eec07b395723cfbd"), - // tx_update_expiration_block_delta - word!("0xc60e37f9a13f262a1df62654e083dc7e4c7d9897fdc7179e56b0789c72944193"), -]; diff --git a/crates/miden-protocol/src/transaction/kernel/tx_event_id.rs b/crates/miden-protocol/src/transaction/kernel/tx_event_id.rs index 9224ec47e5..1c3f3a6161 100644 --- a/crates/miden-protocol/src/transaction/kernel/tx_event_id.rs +++ b/crates/miden-protocol/src/transaction/kernel/tx_event_id.rs @@ -1,8 +1,7 @@ use core::fmt; -use miden_core::EventId; - use 
crate::errors::TransactionEventError; +use crate::vm::{EventId, EventName}; // CONSTANTS // ================================================================================================ @@ -20,64 +19,64 @@ include!(concat!(env!("OUT_DIR"), "/assets/transaction_events.rs")); #[repr(u64)] #[derive(Debug, Clone, Eq, PartialEq)] pub enum TransactionEventId { - AccountBeforeForeignLoad = ACCOUNT_BEFORE_FOREIGN_LOAD, + AccountBeforeForeignLoad = ACCOUNT_BEFORE_FOREIGN_LOAD_ID, - AccountVaultBeforeAddAsset = ACCOUNT_VAULT_BEFORE_ADD_ASSET, - AccountVaultAfterAddAsset = ACCOUNT_VAULT_AFTER_ADD_ASSET, + AccountVaultBeforeAddAsset = ACCOUNT_VAULT_BEFORE_ADD_ASSET_ID, + AccountVaultAfterAddAsset = ACCOUNT_VAULT_AFTER_ADD_ASSET_ID, - AccountVaultBeforeRemoveAsset = ACCOUNT_VAULT_BEFORE_REMOVE_ASSET, - AccountVaultAfterRemoveAsset = ACCOUNT_VAULT_AFTER_REMOVE_ASSET, + AccountVaultBeforeRemoveAsset = ACCOUNT_VAULT_BEFORE_REMOVE_ASSET_ID, + AccountVaultAfterRemoveAsset = ACCOUNT_VAULT_AFTER_REMOVE_ASSET_ID, - AccountVaultBeforeGetAsset = ACCOUNT_VAULT_BEFORE_GET_ASSET, + AccountVaultBeforeGetAsset = ACCOUNT_VAULT_BEFORE_GET_ASSET_ID, - AccountStorageBeforeSetItem = ACCOUNT_STORAGE_BEFORE_SET_ITEM, - AccountStorageAfterSetItem = ACCOUNT_STORAGE_AFTER_SET_ITEM, + AccountStorageBeforeSetItem = ACCOUNT_STORAGE_BEFORE_SET_ITEM_ID, + AccountStorageAfterSetItem = ACCOUNT_STORAGE_AFTER_SET_ITEM_ID, - AccountStorageBeforeGetMapItem = ACCOUNT_STORAGE_BEFORE_GET_MAP_ITEM, + AccountStorageBeforeGetMapItem = ACCOUNT_STORAGE_BEFORE_GET_MAP_ITEM_ID, - AccountStorageBeforeSetMapItem = ACCOUNT_STORAGE_BEFORE_SET_MAP_ITEM, - AccountStorageAfterSetMapItem = ACCOUNT_STORAGE_AFTER_SET_MAP_ITEM, + AccountStorageBeforeSetMapItem = ACCOUNT_STORAGE_BEFORE_SET_MAP_ITEM_ID, + AccountStorageAfterSetMapItem = ACCOUNT_STORAGE_AFTER_SET_MAP_ITEM_ID, - AccountBeforeIncrementNonce = ACCOUNT_BEFORE_INCREMENT_NONCE, - AccountAfterIncrementNonce = ACCOUNT_AFTER_INCREMENT_NONCE, + AccountBeforeIncrementNonce = 
ACCOUNT_BEFORE_INCREMENT_NONCE_ID, + AccountAfterIncrementNonce = ACCOUNT_AFTER_INCREMENT_NONCE_ID, - AccountPushProcedureIndex = ACCOUNT_PUSH_PROCEDURE_INDEX, + AccountPushProcedureIndex = ACCOUNT_PUSH_PROCEDURE_INDEX_ID, - NoteBeforeCreated = NOTE_BEFORE_CREATED, - NoteAfterCreated = NOTE_AFTER_CREATED, + NoteBeforeCreated = NOTE_BEFORE_CREATED_ID, + NoteAfterCreated = NOTE_AFTER_CREATED_ID, - NoteBeforeAddAsset = NOTE_BEFORE_ADD_ASSET, - NoteAfterAddAsset = NOTE_AFTER_ADD_ASSET, + NoteBeforeAddAsset = NOTE_BEFORE_ADD_ASSET_ID, + NoteAfterAddAsset = NOTE_AFTER_ADD_ASSET_ID, - NoteBeforeSetAttachment = NOTE_BEFORE_SET_ATTACHMENT, + NoteBeforeSetAttachment = NOTE_BEFORE_SET_ATTACHMENT_ID, - AuthRequest = AUTH_REQUEST, + AuthRequest = AUTH_REQUEST_ID, - PrologueStart = PROLOGUE_START, - PrologueEnd = PROLOGUE_END, + PrologueStart = PROLOGUE_START_ID, + PrologueEnd = PROLOGUE_END_ID, - NotesProcessingStart = NOTES_PROCESSING_START, - NotesProcessingEnd = NOTES_PROCESSING_END, + NotesProcessingStart = NOTES_PROCESSING_START_ID, + NotesProcessingEnd = NOTES_PROCESSING_END_ID, - NoteExecutionStart = NOTE_EXECUTION_START, - NoteExecutionEnd = NOTE_EXECUTION_END, + NoteExecutionStart = NOTE_EXECUTION_START_ID, + NoteExecutionEnd = NOTE_EXECUTION_END_ID, - TxScriptProcessingStart = TX_SCRIPT_PROCESSING_START, - TxScriptProcessingEnd = TX_SCRIPT_PROCESSING_END, + TxScriptProcessingStart = TX_SCRIPT_PROCESSING_START_ID, + TxScriptProcessingEnd = TX_SCRIPT_PROCESSING_END_ID, - EpilogueStart = EPILOGUE_START, - EpilogueEnd = EPILOGUE_END, + EpilogueStart = EPILOGUE_START_ID, + EpilogueEnd = EPILOGUE_END_ID, - EpilogueAuthProcStart = EPILOGUE_AUTH_PROC_START, - EpilogueAuthProcEnd = EPILOGUE_AUTH_PROC_END, + EpilogueAuthProcStart = EPILOGUE_AUTH_PROC_START_ID, + EpilogueAuthProcEnd = EPILOGUE_AUTH_PROC_END_ID, - EpilogueAfterTxCyclesObtained = EPILOGUE_AFTER_TX_CYCLES_OBTAINED, - EpilogueBeforeTxFeeRemovedFromAccount = EPILOGUE_BEFORE_TX_FEE_REMOVED_FROM_ACCOUNT, + 
EpilogueAfterTxCyclesObtained = EPILOGUE_AFTER_TX_CYCLES_OBTAINED_ID, + EpilogueBeforeTxFeeRemovedFromAccount = EPILOGUE_BEFORE_TX_FEE_REMOVED_FROM_ACCOUNT_ID, - LinkMapSet = LINK_MAP_SET, - LinkMapGet = LINK_MAP_GET, + LinkMapSet = LINK_MAP_SET_ID, + LinkMapGet = LINK_MAP_GET_ID, - Unauthorized = AUTH_UNAUTHORIZED, + Unauthorized = AUTH_UNAUTHORIZED_ID, } impl TransactionEventId { @@ -92,11 +91,56 @@ impl TransactionEventId { pub fn event_id(&self) -> EventId { EventId::from_u64(self.clone() as u64) } + + /// Returns the [`EventName`] of the transaction event. + pub fn event_name(&self) -> &'static EventName { + match self { + Self::AccountBeforeForeignLoad => &ACCOUNT_BEFORE_FOREIGN_LOAD_NAME, + Self::AccountVaultBeforeAddAsset => &ACCOUNT_VAULT_BEFORE_ADD_ASSET_NAME, + Self::AccountVaultAfterAddAsset => &ACCOUNT_VAULT_AFTER_ADD_ASSET_NAME, + Self::AccountVaultBeforeRemoveAsset => &ACCOUNT_VAULT_BEFORE_REMOVE_ASSET_NAME, + Self::AccountVaultAfterRemoveAsset => &ACCOUNT_VAULT_AFTER_REMOVE_ASSET_NAME, + Self::AccountVaultBeforeGetAsset => &ACCOUNT_VAULT_BEFORE_GET_ASSET_NAME, + Self::AccountStorageBeforeSetItem => &ACCOUNT_STORAGE_BEFORE_SET_ITEM_NAME, + Self::AccountStorageAfterSetItem => &ACCOUNT_STORAGE_AFTER_SET_ITEM_NAME, + Self::AccountStorageBeforeGetMapItem => &ACCOUNT_STORAGE_BEFORE_GET_MAP_ITEM_NAME, + Self::AccountStorageBeforeSetMapItem => &ACCOUNT_STORAGE_BEFORE_SET_MAP_ITEM_NAME, + Self::AccountStorageAfterSetMapItem => &ACCOUNT_STORAGE_AFTER_SET_MAP_ITEM_NAME, + Self::AccountBeforeIncrementNonce => &ACCOUNT_BEFORE_INCREMENT_NONCE_NAME, + Self::AccountAfterIncrementNonce => &ACCOUNT_AFTER_INCREMENT_NONCE_NAME, + Self::AccountPushProcedureIndex => &ACCOUNT_PUSH_PROCEDURE_INDEX_NAME, + Self::NoteBeforeCreated => &NOTE_BEFORE_CREATED_NAME, + Self::NoteAfterCreated => &NOTE_AFTER_CREATED_NAME, + Self::NoteBeforeAddAsset => &NOTE_BEFORE_ADD_ASSET_NAME, + Self::NoteAfterAddAsset => &NOTE_AFTER_ADD_ASSET_NAME, + Self::NoteBeforeSetAttachment => 
&NOTE_BEFORE_SET_ATTACHMENT_NAME, + Self::AuthRequest => &AUTH_REQUEST_NAME, + Self::PrologueStart => &PROLOGUE_START_NAME, + Self::PrologueEnd => &PROLOGUE_END_NAME, + Self::NotesProcessingStart => &NOTES_PROCESSING_START_NAME, + Self::NotesProcessingEnd => &NOTES_PROCESSING_END_NAME, + Self::NoteExecutionStart => &NOTE_EXECUTION_START_NAME, + Self::NoteExecutionEnd => &NOTE_EXECUTION_END_NAME, + Self::TxScriptProcessingStart => &TX_SCRIPT_PROCESSING_START_NAME, + Self::TxScriptProcessingEnd => &TX_SCRIPT_PROCESSING_END_NAME, + Self::EpilogueStart => &EPILOGUE_START_NAME, + Self::EpilogueEnd => &EPILOGUE_END_NAME, + Self::EpilogueAuthProcStart => &EPILOGUE_AUTH_PROC_START_NAME, + Self::EpilogueAuthProcEnd => &EPILOGUE_AUTH_PROC_END_NAME, + Self::EpilogueAfterTxCyclesObtained => &EPILOGUE_AFTER_TX_CYCLES_OBTAINED_NAME, + Self::EpilogueBeforeTxFeeRemovedFromAccount => { + &EPILOGUE_BEFORE_TX_FEE_REMOVED_FROM_ACCOUNT_NAME + }, + Self::LinkMapSet => &LINK_MAP_SET_NAME, + Self::LinkMapGet => &LINK_MAP_GET_NAME, + Self::Unauthorized => &AUTH_UNAUTHORIZED_NAME, + } + } } impl fmt::Display for TransactionEventId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{self:?}") + write!(f, "{}", self.event_name()) } } @@ -104,83 +148,85 @@ impl TryFrom for TransactionEventId { type Error = TransactionEventError; fn try_from(event_id: EventId) -> Result { - let raw = event_id.as_felt().as_int(); - - let name = EVENT_NAME_LUT.get(&raw).copied(); + let raw = event_id.as_felt().as_canonical_u64(); match raw { - ACCOUNT_BEFORE_FOREIGN_LOAD => Ok(TransactionEventId::AccountBeforeForeignLoad), + ACCOUNT_BEFORE_FOREIGN_LOAD_ID => Ok(TransactionEventId::AccountBeforeForeignLoad), - ACCOUNT_VAULT_BEFORE_ADD_ASSET => Ok(TransactionEventId::AccountVaultBeforeAddAsset), - ACCOUNT_VAULT_AFTER_ADD_ASSET => Ok(TransactionEventId::AccountVaultAfterAddAsset), + ACCOUNT_VAULT_BEFORE_ADD_ASSET_ID => Ok(TransactionEventId::AccountVaultBeforeAddAsset), + 
ACCOUNT_VAULT_AFTER_ADD_ASSET_ID => Ok(TransactionEventId::AccountVaultAfterAddAsset), - ACCOUNT_VAULT_BEFORE_REMOVE_ASSET => { + ACCOUNT_VAULT_BEFORE_REMOVE_ASSET_ID => { Ok(TransactionEventId::AccountVaultBeforeRemoveAsset) }, - ACCOUNT_VAULT_AFTER_REMOVE_ASSET => { + ACCOUNT_VAULT_AFTER_REMOVE_ASSET_ID => { Ok(TransactionEventId::AccountVaultAfterRemoveAsset) }, - ACCOUNT_VAULT_BEFORE_GET_ASSET => Ok(TransactionEventId::AccountVaultBeforeGetAsset), + ACCOUNT_VAULT_BEFORE_GET_ASSET_ID => Ok(TransactionEventId::AccountVaultBeforeGetAsset), - ACCOUNT_STORAGE_BEFORE_SET_ITEM => Ok(TransactionEventId::AccountStorageBeforeSetItem), - ACCOUNT_STORAGE_AFTER_SET_ITEM => Ok(TransactionEventId::AccountStorageAfterSetItem), + ACCOUNT_STORAGE_BEFORE_SET_ITEM_ID => { + Ok(TransactionEventId::AccountStorageBeforeSetItem) + }, + ACCOUNT_STORAGE_AFTER_SET_ITEM_ID => Ok(TransactionEventId::AccountStorageAfterSetItem), - ACCOUNT_STORAGE_BEFORE_GET_MAP_ITEM => { + ACCOUNT_STORAGE_BEFORE_GET_MAP_ITEM_ID => { Ok(TransactionEventId::AccountStorageBeforeGetMapItem) }, - ACCOUNT_STORAGE_BEFORE_SET_MAP_ITEM => { + ACCOUNT_STORAGE_BEFORE_SET_MAP_ITEM_ID => { Ok(TransactionEventId::AccountStorageBeforeSetMapItem) }, - ACCOUNT_STORAGE_AFTER_SET_MAP_ITEM => { + ACCOUNT_STORAGE_AFTER_SET_MAP_ITEM_ID => { Ok(TransactionEventId::AccountStorageAfterSetMapItem) }, - ACCOUNT_BEFORE_INCREMENT_NONCE => Ok(TransactionEventId::AccountBeforeIncrementNonce), - ACCOUNT_AFTER_INCREMENT_NONCE => Ok(TransactionEventId::AccountAfterIncrementNonce), + ACCOUNT_BEFORE_INCREMENT_NONCE_ID => { + Ok(TransactionEventId::AccountBeforeIncrementNonce) + }, + ACCOUNT_AFTER_INCREMENT_NONCE_ID => Ok(TransactionEventId::AccountAfterIncrementNonce), - ACCOUNT_PUSH_PROCEDURE_INDEX => Ok(TransactionEventId::AccountPushProcedureIndex), + ACCOUNT_PUSH_PROCEDURE_INDEX_ID => Ok(TransactionEventId::AccountPushProcedureIndex), - NOTE_BEFORE_CREATED => Ok(TransactionEventId::NoteBeforeCreated), - NOTE_AFTER_CREATED => 
Ok(TransactionEventId::NoteAfterCreated), + NOTE_BEFORE_CREATED_ID => Ok(TransactionEventId::NoteBeforeCreated), + NOTE_AFTER_CREATED_ID => Ok(TransactionEventId::NoteAfterCreated), - NOTE_BEFORE_ADD_ASSET => Ok(TransactionEventId::NoteBeforeAddAsset), - NOTE_AFTER_ADD_ASSET => Ok(TransactionEventId::NoteAfterAddAsset), + NOTE_BEFORE_ADD_ASSET_ID => Ok(TransactionEventId::NoteBeforeAddAsset), + NOTE_AFTER_ADD_ASSET_ID => Ok(TransactionEventId::NoteAfterAddAsset), - NOTE_BEFORE_SET_ATTACHMENT => Ok(TransactionEventId::NoteBeforeSetAttachment), + NOTE_BEFORE_SET_ATTACHMENT_ID => Ok(TransactionEventId::NoteBeforeSetAttachment), - AUTH_REQUEST => Ok(TransactionEventId::AuthRequest), + AUTH_REQUEST_ID => Ok(TransactionEventId::AuthRequest), - PROLOGUE_START => Ok(TransactionEventId::PrologueStart), - PROLOGUE_END => Ok(TransactionEventId::PrologueEnd), + PROLOGUE_START_ID => Ok(TransactionEventId::PrologueStart), + PROLOGUE_END_ID => Ok(TransactionEventId::PrologueEnd), - NOTES_PROCESSING_START => Ok(TransactionEventId::NotesProcessingStart), - NOTES_PROCESSING_END => Ok(TransactionEventId::NotesProcessingEnd), + NOTES_PROCESSING_START_ID => Ok(TransactionEventId::NotesProcessingStart), + NOTES_PROCESSING_END_ID => Ok(TransactionEventId::NotesProcessingEnd), - NOTE_EXECUTION_START => Ok(TransactionEventId::NoteExecutionStart), - NOTE_EXECUTION_END => Ok(TransactionEventId::NoteExecutionEnd), + NOTE_EXECUTION_START_ID => Ok(TransactionEventId::NoteExecutionStart), + NOTE_EXECUTION_END_ID => Ok(TransactionEventId::NoteExecutionEnd), - TX_SCRIPT_PROCESSING_START => Ok(TransactionEventId::TxScriptProcessingStart), - TX_SCRIPT_PROCESSING_END => Ok(TransactionEventId::TxScriptProcessingEnd), + TX_SCRIPT_PROCESSING_START_ID => Ok(TransactionEventId::TxScriptProcessingStart), + TX_SCRIPT_PROCESSING_END_ID => Ok(TransactionEventId::TxScriptProcessingEnd), - EPILOGUE_START => Ok(TransactionEventId::EpilogueStart), - EPILOGUE_AUTH_PROC_START => 
Ok(TransactionEventId::EpilogueAuthProcStart), - EPILOGUE_AUTH_PROC_END => Ok(TransactionEventId::EpilogueAuthProcEnd), - EPILOGUE_AFTER_TX_CYCLES_OBTAINED => { + EPILOGUE_START_ID => Ok(TransactionEventId::EpilogueStart), + EPILOGUE_AUTH_PROC_START_ID => Ok(TransactionEventId::EpilogueAuthProcStart), + EPILOGUE_AUTH_PROC_END_ID => Ok(TransactionEventId::EpilogueAuthProcEnd), + EPILOGUE_AFTER_TX_CYCLES_OBTAINED_ID => { Ok(TransactionEventId::EpilogueAfterTxCyclesObtained) }, - EPILOGUE_BEFORE_TX_FEE_REMOVED_FROM_ACCOUNT => { + EPILOGUE_BEFORE_TX_FEE_REMOVED_FROM_ACCOUNT_ID => { Ok(TransactionEventId::EpilogueBeforeTxFeeRemovedFromAccount) }, - EPILOGUE_END => Ok(TransactionEventId::EpilogueEnd), + EPILOGUE_END_ID => Ok(TransactionEventId::EpilogueEnd), - LINK_MAP_SET => Ok(TransactionEventId::LinkMapSet), - LINK_MAP_GET => Ok(TransactionEventId::LinkMapGet), + LINK_MAP_SET_ID => Ok(TransactionEventId::LinkMapSet), + LINK_MAP_GET_ID => Ok(TransactionEventId::LinkMapGet), - AUTH_UNAUTHORIZED => Ok(TransactionEventId::Unauthorized), + AUTH_UNAUTHORIZED_ID => Ok(TransactionEventId::Unauthorized), - _ => Err(TransactionEventError::InvalidTransactionEvent(event_id, name)), + _ => Err(TransactionEventError::InvalidTransactionEvent(event_id)), } } } diff --git a/crates/miden-protocol/src/transaction/mod.rs b/crates/miden-protocol/src/transaction/mod.rs index feef2fc878..977155e755 100644 --- a/crates/miden-protocol/src/transaction/mod.rs +++ b/crates/miden-protocol/src/transaction/mod.rs @@ -19,14 +19,18 @@ pub use executed_tx::{ExecutedTransaction, TransactionMeasurements}; pub use inputs::{AccountInputs, InputNote, InputNotes, ToInputNoteCommitments, TransactionInputs}; pub use kernel::{TransactionAdviceInputs, TransactionEventId, TransactionKernel, memory}; pub use ordered_transactions::OrderedTransactionHeaders; -pub use outputs::{OutputNote, OutputNotes, TransactionOutputs}; -pub use partial_blockchain::PartialBlockchain; -pub use proven_tx::{ - InputNoteCommitment, - 
ProvenTransaction, - ProvenTransactionBuilder, - TxAccountUpdate, +pub use outputs::{ + OutputNote, + OutputNoteCollection, + OutputNotes, + PrivateNoteHeader, + PublicOutputNote, + RawOutputNote, + RawOutputNotes, + TransactionOutputs, }; +pub use partial_blockchain::PartialBlockchain; +pub use proven_tx::{InputNoteCommitment, ProvenTransaction, TxAccountUpdate}; pub use transaction_id::TransactionId; pub use tx_args::{TransactionArgs, TransactionScript}; pub use tx_header::TransactionHeader; diff --git a/crates/miden-protocol/src/transaction/ordered_transactions.rs b/crates/miden-protocol/src/transaction/ordered_transactions.rs index 7f8f805e16..fbf32c8e72 100644 --- a/crates/miden-protocol/src/transaction/ordered_transactions.rs +++ b/crates/miden-protocol/src/transaction/ordered_transactions.rs @@ -2,7 +2,13 @@ use alloc::vec::Vec; use crate::account::AccountId; use crate::transaction::{TransactionHeader, TransactionId}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Felt, Hasher, Word, ZERO}; // ORDERED TRANSACTION HEADERS diff --git a/crates/miden-protocol/src/transaction/outputs.rs b/crates/miden-protocol/src/transaction/outputs.rs deleted file mode 100644 index 179da30367..0000000000 --- a/crates/miden-protocol/src/transaction/outputs.rs +++ /dev/null @@ -1,371 +0,0 @@ -use alloc::collections::BTreeSet; -use alloc::string::ToString; -use alloc::vec::Vec; -use core::fmt::Debug; - -use crate::account::AccountHeader; -use crate::asset::FungibleAsset; -use crate::block::BlockNumber; -use crate::errors::TransactionOutputError; -use crate::note::{ - Note, - NoteAssets, - NoteHeader, - NoteId, - NoteMetadata, - NoteRecipient, - PartialNote, - compute_note_commitment, -}; -use crate::utils::serde::{ - ByteReader, - ByteWriter, - Deserializable, - DeserializationError, - Serializable, -}; 
-use crate::{Felt, Hasher, MAX_OUTPUT_NOTES_PER_TX, Word}; - -// TRANSACTION OUTPUTS -// ================================================================================================ - -/// Describes the result of executing a transaction. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TransactionOutputs { - /// Information related to the account's final state. - pub account: AccountHeader, - /// The commitment to the delta computed by the transaction kernel. - pub account_delta_commitment: Word, - /// Set of output notes created by the transaction. - pub output_notes: OutputNotes, - /// The fee of the transaction. - pub fee: FungibleAsset, - /// Defines up to which block the transaction is considered valid. - pub expiration_block_num: BlockNumber, -} - -impl TransactionOutputs { - // CONSTANTS - // -------------------------------------------------------------------------------------------- - - /// The index of the word at which the final account nonce is stored on the output stack. - pub const OUTPUT_NOTES_COMMITMENT_WORD_IDX: usize = 0; - - /// The index of the word at which the account update commitment is stored on the output stack. - pub const ACCOUNT_UPDATE_COMMITMENT_WORD_IDX: usize = 1; - - /// The index of the word at which the fee asset is stored on the output stack. - pub const FEE_ASSET_WORD_IDX: usize = 2; - - /// The index of the item at which the expiration block height is stored on the output stack. 
- pub const EXPIRATION_BLOCK_ELEMENT_IDX: usize = 12; -} - -impl Serializable for TransactionOutputs { - fn write_into(&self, target: &mut W) { - self.account.write_into(target); - self.account_delta_commitment.write_into(target); - self.output_notes.write_into(target); - self.fee.write_into(target); - self.expiration_block_num.write_into(target); - } -} - -impl Deserializable for TransactionOutputs { - fn read_from(source: &mut R) -> Result { - let account = AccountHeader::read_from(source)?; - let account_delta_commitment = Word::read_from(source)?; - let output_notes = OutputNotes::read_from(source)?; - let fee = FungibleAsset::read_from(source)?; - let expiration_block_num = BlockNumber::read_from(source)?; - - Ok(Self { - account, - account_delta_commitment, - output_notes, - fee, - expiration_block_num, - }) - } -} - -// OUTPUT NOTES -// ================================================================================================ - -/// Contains a list of output notes of a transaction. The list can be empty if the transaction does -/// not produce any notes. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct OutputNotes { - notes: Vec, - commitment: Word, -} - -impl OutputNotes { - // CONSTRUCTOR - // -------------------------------------------------------------------------------------------- - - /// Returns new [OutputNotes] instantiated from the provide vector of notes. - /// - /// # Errors - /// Returns an error if: - /// - The total number of notes is greater than [`MAX_OUTPUT_NOTES_PER_TX`]. - /// - The vector of notes contains duplicates. 
- pub fn new(notes: Vec) -> Result { - if notes.len() > MAX_OUTPUT_NOTES_PER_TX { - return Err(TransactionOutputError::TooManyOutputNotes(notes.len())); - } - - let mut seen_notes = BTreeSet::new(); - for note in notes.iter() { - if !seen_notes.insert(note.id()) { - return Err(TransactionOutputError::DuplicateOutputNote(note.id())); - } - } - - let commitment = Self::compute_commitment(notes.iter().map(OutputNote::header)); - - Ok(Self { notes, commitment }) - } - - // PUBLIC ACCESSORS - // -------------------------------------------------------------------------------------------- - - /// Returns the commitment to the output notes. - /// - /// The commitment is computed as a sequential hash of (hash, metadata) tuples for the notes - /// created in a transaction. - pub fn commitment(&self) -> Word { - self.commitment - } - /// Returns total number of output notes. - pub fn num_notes(&self) -> usize { - self.notes.len() - } - - /// Returns true if this [OutputNotes] does not contain any notes. - pub fn is_empty(&self) -> bool { - self.notes.is_empty() - } - - /// Returns a reference to the note located at the specified index. - pub fn get_note(&self, idx: usize) -> &OutputNote { - &self.notes[idx] - } - - // ITERATORS - // -------------------------------------------------------------------------------------------- - - /// Returns an iterator over notes in this [OutputNotes]. - pub fn iter(&self) -> impl Iterator { - self.notes.iter() - } - - // HELPERS - // -------------------------------------------------------------------------------------------- - - /// Computes a commitment to output notes. - /// - /// - For an empty list, [`Word::empty`] is returned. - /// - For a non-empty list of notes, this is a sequential hash of (note_id, metadata_commitment) - /// tuples for the notes created in a transaction, where `metadata_commitment` is the return - /// value of [`NoteMetadata::to_commitment`]. 
- pub(crate) fn compute_commitment<'header>( - notes: impl ExactSizeIterator, - ) -> Word { - if notes.len() == 0 { - return Word::empty(); - } - - let mut elements: Vec = Vec::with_capacity(notes.len() * 8); - for note_header in notes { - elements.extend_from_slice(note_header.id().as_elements()); - elements.extend_from_slice(note_header.metadata().to_commitment().as_elements()); - } - - Hasher::hash_elements(&elements) - } -} - -// SERIALIZATION -// ------------------------------------------------------------------------------------------------ - -impl Serializable for OutputNotes { - fn write_into(&self, target: &mut W) { - // assert is OK here because we enforce max number of notes in the constructor - assert!(self.notes.len() <= u16::MAX.into()); - target.write_u16(self.notes.len() as u16); - target.write_many(&self.notes); - } -} - -impl Deserializable for OutputNotes { - fn read_from(source: &mut R) -> Result { - let num_notes = source.read_u16()?; - let notes = source.read_many::(num_notes.into())?; - Self::new(notes).map_err(|err| DeserializationError::InvalidValue(err.to_string())) - } -} - -// OUTPUT NOTE -// ================================================================================================ - -const FULL: u8 = 0; -const PARTIAL: u8 = 1; -const HEADER: u8 = 2; - -/// The types of note outputs supported by the transaction kernel. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum OutputNote { - Full(Note), - Partial(PartialNote), - Header(NoteHeader), -} - -impl OutputNote { - /// The assets contained in the note. - pub fn assets(&self) -> Option<&NoteAssets> { - match self { - OutputNote::Full(note) => Some(note.assets()), - OutputNote::Partial(note) => Some(note.assets()), - OutputNote::Header(_) => None, - } - } - - /// Unique note identifier. - /// - /// This value is both an unique identifier and a commitment to the note. 
- pub fn id(&self) -> NoteId { - match self { - OutputNote::Full(note) => note.id(), - OutputNote::Partial(note) => note.id(), - OutputNote::Header(note) => note.id(), - } - } - - /// Returns the recipient of the processed [`Full`](OutputNote::Full) output note, [`None`] if - /// the note type is not [`Full`](OutputNote::Full). - /// - /// See [crate::note::NoteRecipient] for more details. - pub fn recipient(&self) -> Option<&NoteRecipient> { - match self { - OutputNote::Full(note) => Some(note.recipient()), - OutputNote::Partial(_) => None, - OutputNote::Header(_) => None, - } - } - - /// Returns the recipient digest of the processed [`Full`](OutputNote::Full) or - /// [`Partial`](OutputNote::Partial) output note. Returns [`None`] if the note type is - /// [`Header`](OutputNote::Header). - /// - /// See [crate::note::NoteRecipient] for more details. - pub fn recipient_digest(&self) -> Option { - match self { - OutputNote::Full(note) => Some(note.recipient().digest()), - OutputNote::Partial(note) => Some(note.recipient_digest()), - OutputNote::Header(_) => None, - } - } - - /// Note's metadata. - pub fn metadata(&self) -> &NoteMetadata { - match self { - OutputNote::Full(note) => note.metadata(), - OutputNote::Partial(note) => note.metadata(), - OutputNote::Header(note) => note.metadata(), - } - } - - /// Erase private note information. - /// - /// Specifically: - /// - Full private notes are converted into note headers. - /// - All partial notes are converted into note headers. - pub fn shrink(&self) -> Self { - match self { - OutputNote::Full(note) if note.metadata().is_private() => { - OutputNote::Header(note.header().clone()) - }, - OutputNote::Partial(note) => OutputNote::Header(note.header().clone()), - _ => self.clone(), - } - } - - /// Returns a reference to the [`NoteHeader`] of this note. 
- pub fn header(&self) -> &NoteHeader { - match self { - OutputNote::Full(note) => note.header(), - OutputNote::Partial(note) => note.header(), - OutputNote::Header(header) => header, - } - } - - /// Returns a commitment to the note and its metadata. - /// - /// > hash(NOTE_ID || NOTE_METADATA_COMMITMENT) - pub fn commitment(&self) -> Word { - compute_note_commitment(self.id(), self.metadata()) - } -} - -// SERIALIZATION -// ------------------------------------------------------------------------------------------------ - -impl Serializable for OutputNote { - fn write_into(&self, target: &mut W) { - match self { - OutputNote::Full(note) => { - target.write(FULL); - target.write(note); - }, - OutputNote::Partial(note) => { - target.write(PARTIAL); - target.write(note); - }, - OutputNote::Header(note) => { - target.write(HEADER); - target.write(note); - }, - } - } -} - -impl Deserializable for OutputNote { - fn read_from(source: &mut R) -> Result { - match source.read_u8()? { - FULL => Ok(OutputNote::Full(Note::read_from(source)?)), - PARTIAL => Ok(OutputNote::Partial(PartialNote::read_from(source)?)), - HEADER => Ok(OutputNote::Header(NoteHeader::read_from(source)?)), - v => Err(DeserializationError::InvalidValue(format!("invalid note type: {v}"))), - } - } -} - -// TESTS -// ================================================================================================ - -#[cfg(test)] -mod output_notes_tests { - use assert_matches::assert_matches; - - use super::OutputNotes; - use crate::Word; - use crate::errors::TransactionOutputError; - use crate::note::Note; - use crate::transaction::OutputNote; - - #[test] - fn test_duplicate_output_notes() -> anyhow::Result<()> { - let mock_note = Note::mock_noop(Word::empty()); - let mock_note_id = mock_note.id(); - let mock_note_clone = mock_note.clone(); - - let error = - OutputNotes::new(vec![OutputNote::Full(mock_note), OutputNote::Full(mock_note_clone)]) - .expect_err("input notes creation should fail"); - - 
assert_matches!(error, TransactionOutputError::DuplicateOutputNote(note_id) if note_id == mock_note_id); - - Ok(()) - } -} diff --git a/crates/miden-protocol/src/transaction/outputs/mod.rs b/crates/miden-protocol/src/transaction/outputs/mod.rs new file mode 100644 index 0000000000..da4e7f2cac --- /dev/null +++ b/crates/miden-protocol/src/transaction/outputs/mod.rs @@ -0,0 +1,100 @@ +use core::fmt::Debug; + +use crate::Word; +use crate::account::AccountHeader; +use crate::asset::FungibleAsset; +use crate::block::BlockNumber; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; + +mod notes; +pub use notes::{ + OutputNote, + OutputNoteCollection, + OutputNotes, + PrivateNoteHeader, + PublicOutputNote, + RawOutputNote, + RawOutputNotes, +}; + +#[cfg(test)] +mod tests; + +// TRANSACTION OUTPUTS +// ================================================================================================ + +/// Describes the result of executing a transaction. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TransactionOutputs { + /// Information related to the account's final state. + pub account: AccountHeader, + /// The commitment to the delta computed by the transaction kernel. + pub account_delta_commitment: Word, + /// Set of output notes created by the transaction. + pub output_notes: RawOutputNotes, + /// The fee of the transaction. + pub fee: FungibleAsset, + /// Defines up to which block the transaction is considered valid. + pub expiration_block_num: BlockNumber, +} + +impl TransactionOutputs { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// The element index starting from which the output notes commitment is stored on the output + /// stack. + pub const OUTPUT_NOTES_COMMITMENT_WORD_IDX: usize = 0; + + /// The element index starting from which the account update commitment word is stored on the + /// output stack. 
+ pub const ACCOUNT_UPDATE_COMMITMENT_WORD_IDX: usize = 4; + + /// The index of the element at which the ID suffix of the faucet that issues the native asset + /// is stored on the output stack. + pub const NATIVE_ASSET_ID_SUFFIX_ELEMENT_IDX: usize = 8; + + /// The index of the element at which the ID prefix of the faucet that issues the native asset + /// is stored on the output stack. + pub const NATIVE_ASSET_ID_PREFIX_ELEMENT_IDX: usize = 9; + + /// The index of the element at which the fee amount is stored on the output stack. + pub const FEE_AMOUNT_ELEMENT_IDX: usize = 10; + + /// The index of the item at which the expiration block height is stored on the output stack. + pub const EXPIRATION_BLOCK_ELEMENT_IDX: usize = 11; +} + +impl Serializable for TransactionOutputs { + fn write_into(&self, target: &mut W) { + self.account.write_into(target); + self.account_delta_commitment.write_into(target); + self.output_notes.write_into(target); + self.fee.write_into(target); + self.expiration_block_num.write_into(target); + } +} + +impl Deserializable for TransactionOutputs { + fn read_from(source: &mut R) -> Result { + let account = AccountHeader::read_from(source)?; + let account_delta_commitment = Word::read_from(source)?; + let output_notes = RawOutputNotes::read_from(source)?; + let fee = FungibleAsset::read_from(source)?; + let expiration_block_num = BlockNumber::read_from(source)?; + + Ok(Self { + account, + account_delta_commitment, + output_notes, + fee, + expiration_block_num, + }) + } +} diff --git a/crates/miden-protocol/src/transaction/outputs/notes.rs b/crates/miden-protocol/src/transaction/outputs/notes.rs new file mode 100644 index 0000000000..fa20c3cfd6 --- /dev/null +++ b/crates/miden-protocol/src/transaction/outputs/notes.rs @@ -0,0 +1,614 @@ +use alloc::collections::BTreeSet; +use alloc::string::ToString; +use alloc::vec::Vec; +use core::fmt::Debug; + +use crate::constants::NOTE_MAX_SIZE; +use crate::errors::{OutputNoteError, TransactionOutputError}; 
+use crate::note::{ + Note, + NoteAssets, + NoteHeader, + NoteId, + NoteMetadata, + NoteRecipient, + PartialNote, + compute_note_commitment, +}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Hasher, MAX_OUTPUT_NOTES_PER_TX, Word}; + +// OUTPUT NOTE COLLECTION +// ================================================================================================ + +/// Contains a list of output notes of a transaction. The list can be empty if the transaction does +/// not produce any notes. +/// +/// This struct is generic over the note type `N`, allowing it to be used with both +/// [`RawOutputNote`] (in [`ExecutedTransaction`](crate::transaction::ExecutedTransaction)) and +/// [`OutputNote`] (in [`ProvenTransaction`](crate::transaction::ProvenTransaction)). +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct OutputNoteCollection { + notes: Vec, + commitment: Word, +} + +impl OutputNoteCollection +where + for<'a> &'a NoteHeader: From<&'a N>, + for<'a> NoteId: From<&'a N>, +{ + // CONSTRUCTOR + // -------------------------------------------------------------------------------------------- + + /// Returns new [OutputNoteCollection] instantiated from the provided vector of notes. + /// + /// # Errors + /// Returns an error if: + /// - The total number of notes is greater than [`MAX_OUTPUT_NOTES_PER_TX`]. + /// - The vector of notes contains duplicates. 
+ pub fn new(notes: Vec) -> Result { + if notes.len() > MAX_OUTPUT_NOTES_PER_TX { + return Err(TransactionOutputError::TooManyOutputNotes(notes.len())); + } + + let mut seen_notes = BTreeSet::new(); + for note in notes.iter() { + let note_id = NoteId::from(note); + if !seen_notes.insert(note_id) { + return Err(TransactionOutputError::DuplicateOutputNote(note_id)); + } + } + + let commitment = Self::compute_commitment(notes.iter().map(<&NoteHeader>::from)); + + Ok(Self { notes, commitment }) + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the commitment to the output notes. + /// + /// The commitment is computed as a sequential hash of (note ID, metadata) tuples for the notes + /// created in a transaction. + pub fn commitment(&self) -> Word { + self.commitment + } + + /// Returns total number of output notes. + pub fn num_notes(&self) -> usize { + self.notes.len() + } + + /// Returns true if this [OutputNoteCollection] does not contain any notes. + pub fn is_empty(&self) -> bool { + self.notes.is_empty() + } + + /// Returns a reference to the note located at the specified index. + pub fn get_note(&self, idx: usize) -> &N { + &self.notes[idx] + } + + // ITERATORS + // -------------------------------------------------------------------------------------------- + + /// Returns an iterator over notes in this [OutputNoteCollection]. + pub fn iter(&self) -> impl Iterator { + self.notes.iter() + } + + // HELPERS + // -------------------------------------------------------------------------------------------- + + /// Computes a commitment to output notes. + /// + /// - For an empty list, [`Word::empty`] is returned. + /// - For a non-empty list of notes, this is a sequential hash of (note_id, metadata_commitment) + /// tuples for the notes created in a transaction, where `metadata_commitment` is the return + /// value of [`NoteMetadata::to_commitment`]. 
+ pub(crate) fn compute_commitment<'header>( + notes: impl ExactSizeIterator, + ) -> Word { + if notes.len() == 0 { + return Word::empty(); + } + + let mut elements: Vec = Vec::with_capacity(notes.len() * 8); + for note_header in notes { + elements.extend_from_slice(note_header.id().as_elements()); + elements.extend_from_slice(note_header.metadata().to_commitment().as_elements()); + } + + Hasher::hash_elements(&elements) + } +} + +// SERIALIZATION +// ------------------------------------------------------------------------------------------------ + +impl Serializable for OutputNoteCollection { + fn write_into(&self, target: &mut W) { + // assert is OK here because we enforce max number of notes in the constructor + assert!(self.notes.len() <= u16::MAX.into()); + target.write_u16(self.notes.len() as u16); + target.write_many(&self.notes); + } +} + +impl Deserializable for OutputNoteCollection +where + N: Deserializable, + for<'a> &'a NoteHeader: From<&'a N>, + for<'a> NoteId: From<&'a N>, +{ + fn read_from(source: &mut R) -> Result { + let num_notes = source.read_u16()?; + let notes = source.read_many_iter::(num_notes.into())?.collect::>()?; + Self::new(notes).map_err(|err| DeserializationError::InvalidValue(err.to_string())) + } +} + +// RAW OUTPUT NOTES +// ================================================================================================ + +/// Output notes produced during transaction execution (before proving). +/// +/// Contains [`RawOutputNote`] instances which represent notes as they exist immediately after +/// transaction execution. +pub type RawOutputNotes = OutputNoteCollection; + +/// The types of note outputs produced during transaction execution (before proving). +/// +/// This enum represents notes as they exist immediately after transaction execution, +/// before they are processed for inclusion in a proven transaction. 
It includes: +/// - Full notes with all details (public or private) +/// - Partial notes (notes created with only recipient digest, not full recipient details) +/// +/// During proving, these are converted to [`OutputNote`] via the +/// [`to_output_note`](Self::to_output_note) method, which enforces size limits on public notes and +/// converts private/partial notes to headers. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum RawOutputNote { + Full(Note), + Partial(PartialNote), +} + +impl RawOutputNote { + const FULL: u8 = 0; + const PARTIAL: u8 = 1; + + /// The assets contained in the note. + pub fn assets(&self) -> &NoteAssets { + match self { + Self::Full(note) => note.assets(), + Self::Partial(note) => note.assets(), + } + } + + /// Unique note identifier. + /// + /// This value is both an unique identifier and a commitment to the note. + pub fn id(&self) -> NoteId { + match self { + Self::Full(note) => note.id(), + Self::Partial(note) => note.id(), + } + } + + /// Returns the recipient of the processed [`Full`](RawOutputNote::Full) output note, [`None`] + /// if the note type is not [`Full`](RawOutputNote::Full). + /// + /// See [crate::note::NoteRecipient] for more details. + pub fn recipient(&self) -> Option<&NoteRecipient> { + match self { + Self::Full(note) => Some(note.recipient()), + Self::Partial(_) => None, + } + } + + /// Returns the recipient digest of the output note. + /// + /// See [crate::note::NoteRecipient] for more details. + pub fn recipient_digest(&self) -> Word { + match self { + RawOutputNote::Full(note) => note.recipient().digest(), + RawOutputNote::Partial(note) => note.recipient_digest(), + } + } + + /// Returns the note's metadata. + pub fn metadata(&self) -> &NoteMetadata { + match self { + Self::Full(note) => note.metadata(), + Self::Partial(note) => note.metadata(), + } + } + + /// Converts this output note to a proven output note. 
+ /// + /// This method performs the following transformations: + /// - Private notes (full or partial) are converted into note headers (only public info + /// retained). + /// - Full public notes are wrapped in [`PublicOutputNote`], which enforces size limits + /// + /// # Errors + /// Returns an error if a public note exceeds the maximum allowed size ([`NOTE_MAX_SIZE`]). + pub fn to_output_note(&self) -> Result { + match self { + Self::Full(note) if note.metadata().is_private() => { + Ok(OutputNote::Private(PrivateNoteHeader::new(note.header().clone())?)) + }, + Self::Full(note) => Ok(OutputNote::Public(PublicOutputNote::new(note.clone())?)), + Self::Partial(note) => { + Ok(OutputNote::Private(PrivateNoteHeader::new(note.header().clone())?)) + }, + } + } + + /// Returns a reference to the [`NoteHeader`] of this note. + pub fn header(&self) -> &NoteHeader { + match self { + Self::Full(note) => note.header(), + Self::Partial(note) => note.header(), + } + } + + /// Returns a commitment to the note and its metadata. + /// + /// > hash(NOTE_ID || NOTE_METADATA_COMMITMENT) + pub fn commitment(&self) -> Word { + compute_note_commitment(self.id(), self.metadata()) + } +} + +impl From<&RawOutputNote> for NoteId { + fn from(note: &RawOutputNote) -> Self { + note.id() + } +} + +impl<'note> From<&'note RawOutputNote> for &'note NoteHeader { + fn from(note: &'note RawOutputNote) -> Self { + note.header() + } +} + +impl Serializable for RawOutputNote { + fn write_into(&self, target: &mut W) { + match self { + Self::Full(note) => { + target.write(Self::FULL); + target.write(note); + }, + Self::Partial(note) => { + target.write(Self::PARTIAL); + target.write(note); + }, + } + } + + fn get_size_hint(&self) -> usize { + // Serialized size of the enum tag. 
+ let tag_size = 0u8.get_size_hint(); + + match self { + Self::Full(note) => tag_size + note.get_size_hint(), + Self::Partial(note) => tag_size + note.get_size_hint(), + } + } +} + +impl Deserializable for RawOutputNote { + fn read_from(source: &mut R) -> Result { + match source.read_u8()? { + Self::FULL => Ok(Self::Full(Note::read_from(source)?)), + Self::PARTIAL => Ok(Self::Partial(PartialNote::read_from(source)?)), + v => Err(DeserializationError::InvalidValue(format!("invalid output note type: {v}"))), + } + } +} + +// OUTPUT NOTES +// ================================================================================================ + +/// Output notes in a proven transaction. +/// +/// Contains [`OutputNote`] instances which have been processed for inclusion in proven +/// transactions, with size limits enforced on public notes. +pub type OutputNotes = OutputNoteCollection; + +/// Output note types that can appear in a proven transaction. +/// +/// This enum represents the final form of output notes after proving. Unlike [`RawOutputNote`], +/// this enum: +/// - Does not include partial notes (they are converted to headers). +/// - Wraps public notes in [`PublicOutputNote`] which enforces size limits. +/// - Contains only the minimal information needed for verification. +#[allow(clippy::large_enum_variant)] +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum OutputNote { + /// A public note with full details, size-validated. + Public(PublicOutputNote), + /// A note private header (for private notes). + Private(PrivateNoteHeader), +} + +impl OutputNote { + const PUBLIC: u8 = 0; + const PRIVATE: u8 = 1; + + /// Unique note identifier. + /// + /// This value is both an unique identifier and a commitment to the note. + pub fn id(&self) -> NoteId { + match self { + Self::Public(note) => note.id(), + Self::Private(header) => header.id(), + } + } + + /// Note's metadata. 
+ pub fn metadata(&self) -> &NoteMetadata { + match self { + Self::Public(note) => note.metadata(), + Self::Private(header) => header.metadata(), + } + } + + /// The assets contained in the note, if available. + /// + /// Returns `Some` for public notes, `None` for private notes. + pub fn assets(&self) -> Option<&NoteAssets> { + match self { + Self::Public(note) => Some(note.assets()), + Self::Private(_) => None, + } + } + + /// Returns a commitment to the note and its metadata. + /// + /// > hash(NOTE_ID || NOTE_METADATA_COMMITMENT) + pub fn to_commitment(&self) -> Word { + compute_note_commitment(self.id(), self.metadata()) + } + + /// Returns the recipient of the public note, if this is a public note. + pub fn recipient(&self) -> Option<&NoteRecipient> { + match self { + Self::Public(note) => Some(note.recipient()), + Self::Private(_) => None, + } + } +} + +// CONVERSIONS +// ------------------------------------------------------------------------------------------------ + +impl<'note> From<&'note OutputNote> for &'note NoteHeader { + fn from(value: &'note OutputNote) -> Self { + match value { + OutputNote::Public(note) => note.header(), + OutputNote::Private(header) => &header.0, + } + } +} + +impl From<&OutputNote> for NoteId { + fn from(value: &OutputNote) -> Self { + value.id() + } +} + +// SERIALIZATION +// ------------------------------------------------------------------------------------------------ + +impl Serializable for OutputNote { + fn write_into(&self, target: &mut W) { + match self { + Self::Public(note) => { + target.write(Self::PUBLIC); + target.write(note); + }, + Self::Private(header) => { + target.write(Self::PRIVATE); + target.write(header); + }, + } + } + + fn get_size_hint(&self) -> usize { + let tag_size = 0u8.get_size_hint(); + match self { + Self::Public(note) => tag_size + note.get_size_hint(), + Self::Private(header) => tag_size + header.get_size_hint(), + } + } +} + +impl Deserializable for OutputNote { + fn read_from(source: &mut 
R) -> Result { + match source.read_u8()? { + Self::PUBLIC => Ok(Self::Public(PublicOutputNote::read_from(source)?)), + Self::PRIVATE => Ok(Self::Private(PrivateNoteHeader::read_from(source)?)), + v => Err(DeserializationError::InvalidValue(format!( + "invalid proven output note type: {v}" + ))), + } + } +} + +// PUBLIC OUTPUT NOTE +// ================================================================================================ + +/// A public output note with enforced size limits. +/// +/// This struct wraps a [`Note`] and guarantees that: +/// - The note is public (not private). +/// - The serialized size does not exceed [`NOTE_MAX_SIZE`]. +/// +/// This type is used in [`OutputNote::Public`] to ensure that all public notes in proven +/// transactions meet the protocol's size requirements. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PublicOutputNote(Note); + +impl PublicOutputNote { + /// Creates a new [`PublicOutputNote`] from the given note. + /// + /// # Errors + /// Returns an error if: + /// - The note is private. + /// - The serialized size exceeds [`NOTE_MAX_SIZE`]. + pub fn new(mut note: Note) -> Result { + // Ensure the note is public + if note.metadata().is_private() { + return Err(OutputNoteError::NoteIsPrivate(note.id())); + } + + // Strip decorators from the note script + note.minify_script(); + + // Check the size limit after stripping decorators + let note_size = note.get_size_hint(); + if note_size > NOTE_MAX_SIZE as usize { + return Err(OutputNoteError::NoteSizeLimitExceeded { note_id: note.id(), note_size }); + } + + Ok(Self(note)) + } + + /// Returns the unique identifier of this note. + pub fn id(&self) -> NoteId { + self.0.id() + } + + /// Returns the note's metadata. + pub fn metadata(&self) -> &NoteMetadata { + self.0.metadata() + } + + /// Returns the note's assets. + pub fn assets(&self) -> &NoteAssets { + self.0.assets() + } + + /// Returns the note's recipient. 
+ pub fn recipient(&self) -> &NoteRecipient { + self.0.recipient() + } + + /// Returns the note's header. + pub fn header(&self) -> &NoteHeader { + self.0.header() + } + + /// Returns a reference to the underlying note. + pub fn as_note(&self) -> &Note { + &self.0 + } + + /// Consumes this wrapper and returns the underlying note. + pub fn into_note(self) -> Note { + self.0 + } +} + +impl Serializable for PublicOutputNote { + fn write_into(&self, target: &mut W) { + self.0.write_into(target); + } + + fn get_size_hint(&self) -> usize { + self.0.get_size_hint() + } +} + +impl Deserializable for PublicOutputNote { + fn read_from(source: &mut R) -> Result { + let note = Note::read_from(source)?; + Self::new(note).map_err(|err| DeserializationError::InvalidValue(err.to_string())) + } +} + +// PRIVATE NOTE HEADER +// ================================================================================================ + +/// A [NoteHeader] of a private note. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PrivateNoteHeader(NoteHeader); + +impl PrivateNoteHeader { + /// Creates a new [`PrivateNoteHeader`] from the given note header. + /// + /// # Errors + /// Returns an error if: + /// - The provided header is for a public note. + pub fn new(header: NoteHeader) -> Result { + if !header.metadata().is_private() { + return Err(OutputNoteError::NoteIsPublic(header.id())); + } + + Ok(Self(header)) + } + + /// Returns the note's identifier. + /// + /// The [NoteId] value is both an unique identifier and a commitment to the note. + pub fn id(&self) -> NoteId { + self.0.id() + } + + /// Returns the note's metadata. + pub fn metadata(&self) -> &NoteMetadata { + self.0.metadata() + } + + /// Consumes self and returns the note header's metadata. + pub fn into_metadata(self) -> NoteMetadata { + self.0.into_metadata() + } + + /// Returns a commitment to the note and its metadata. 
+ /// + /// > hash(NOTE_ID || NOTE_METADATA_COMMITMENT) + /// + /// This value is used primarily for authenticating notes consumed when they are consumed + /// in a transaction. + pub fn commitment(&self) -> Word { + self.0.commitment() + } + + /// Returns a reference to the underlying note header. + pub fn as_header(&self) -> &NoteHeader { + &self.0 + } + + /// Consumes this wrapper and returns the underlying note header. + pub fn into_header(self) -> NoteHeader { + self.0 + } +} + +impl Serializable for PrivateNoteHeader { + fn write_into(&self, target: &mut W) { + self.0.write_into(target); + } + + fn get_size_hint(&self) -> usize { + self.0.get_size_hint() + } +} + +impl Deserializable for PrivateNoteHeader { + fn read_from(source: &mut R) -> Result { + let header = NoteHeader::read_from(source)?; + Self::new(header).map_err(|err| DeserializationError::InvalidValue(err.to_string())) + } +} diff --git a/crates/miden-protocol/src/transaction/outputs/tests.rs b/crates/miden-protocol/src/transaction/outputs/tests.rs new file mode 100644 index 0000000000..66fed60ff9 --- /dev/null +++ b/crates/miden-protocol/src/transaction/outputs/tests.rs @@ -0,0 +1,145 @@ +use alloc::sync::Arc; + +use assert_matches::assert_matches; + +use super::{PublicOutputNote, RawOutputNote, RawOutputNotes}; +use crate::account::AccountId; +use crate::assembly::mast::{ExternalNodeBuilder, MastForest, MastForestContributor}; +use crate::asset::FungibleAsset; +use crate::constants::NOTE_MAX_SIZE; +use crate::errors::{OutputNoteError, TransactionOutputError}; +use crate::note::{ + Note, + NoteAssets, + NoteMetadata, + NoteRecipient, + NoteScript, + NoteStorage, + NoteTag, + NoteType, +}; +use crate::testing::account_id::{ + ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, + ACCOUNT_ID_SENDER, +}; +use crate::utils::serde::Serializable; +use crate::{Felt, Word}; + +#[test] +fn test_duplicate_output_notes() -> anyhow::Result<()> { + let mock_note = 
Note::mock_noop(Word::empty()); + let mock_note_id = mock_note.id(); + let mock_note_clone = mock_note.clone(); + + let error = RawOutputNotes::new(vec![ + RawOutputNote::Full(mock_note), + RawOutputNote::Full(mock_note_clone), + ]) + .expect_err("input notes creation should fail"); + + assert_matches!(error, TransactionOutputError::DuplicateOutputNote(note_id) if note_id == mock_note_id); + + Ok(()) +} + +#[test] +fn output_note_size_hint_matches_serialized_length() -> anyhow::Result<()> { + let sender_id = ACCOUNT_ID_SENDER.try_into().unwrap(); + + // Build a note with at least two assets. + let faucet_id_1 = AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET).unwrap(); + let faucet_id_2 = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET).unwrap(); + + let asset_1 = FungibleAsset::new(faucet_id_1, 100)?.into(); + let asset_2 = FungibleAsset::new(faucet_id_2, 200)?.into(); + + let assets = NoteAssets::new(vec![asset_1, asset_2])?; + + // Build metadata similarly to how mock notes are constructed. + let metadata = NoteMetadata::new(sender_id, NoteType::Private) + .with_tag(NoteTag::with_account_target(sender_id)); + + // Build storage with at least two values. + let storage = NoteStorage::new(vec![Felt::new(1), Felt::new(2)])?; + + let serial_num = Word::empty(); + let script = NoteScript::mock(); + let recipient = NoteRecipient::new(serial_num, script, storage); + + let note = Note::new(assets, metadata, recipient); + let output_note = RawOutputNote::Full(note); + + let bytes = output_note.to_bytes(); + + assert_eq!(bytes.len(), output_note.get_size_hint()); + + Ok(()) +} + +// Construct a public note whose serialized size exceeds NOTE_MAX_SIZE by building +// a MastForest with many external nodes. External nodes carry no debug info, so +// `minify_script()` (called inside `PublicOutputNote::new()`) cannot shrink them. 
+#[test] +fn oversized_public_note_triggers_size_limit_error() -> anyhow::Result<()> { + let sender_id = ACCOUNT_ID_SENDER.try_into().unwrap(); + + // Build a large MastForest by adding many external nodes. Each node stores a + // 32-byte digest; 7000 nodes comfortably exceed the 256 KiB limit. + let mut mast = MastForest::new(); + let mut root_id = None; + for i in 0..7_000_u16 { + let digest = Word::new([Felt::from(i + 1), Felt::ZERO, Felt::ZERO, Felt::ZERO]); + let id = ExternalNodeBuilder::new(digest) + .add_to_forest(&mut mast) + .expect("adding external node should not fail"); + root_id = Some(id); + } + let root_id = root_id.unwrap(); + mast.make_root(root_id); + + let script = NoteScript::from_parts(Arc::new(mast), root_id); + + let serial_num = Word::empty(); + let storage = NoteStorage::new(alloc::vec::Vec::new())?; + + // Create a public note (NoteType::Public is required for PublicOutputNote) + let faucet_id = AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET).unwrap(); + let asset = FungibleAsset::new(faucet_id, 100)?.into(); + let assets = NoteAssets::new(vec![asset])?; + + let metadata = NoteMetadata::new(sender_id, NoteType::Public) + .with_tag(NoteTag::with_account_target(sender_id)); + + let recipient = NoteRecipient::new(serial_num, script, storage); + let oversized_note = Note::new(assets, metadata, recipient); + + // Sanity-check that our constructed note is indeed larger than the configured + // maximum. 
+ let computed_note_size = oversized_note.get_size_hint(); + assert!( + computed_note_size > NOTE_MAX_SIZE as usize, + "Expected note size ({computed_note_size}) to exceed NOTE_MAX_SIZE ({NOTE_MAX_SIZE})" + ); + + // Creating a PublicOutputNote should fail with size limit error + let result = PublicOutputNote::new(oversized_note.clone()); + + assert_matches!( + result, + Err(OutputNoteError::NoteSizeLimitExceeded { note_id: _, note_size }) + if note_size > NOTE_MAX_SIZE as usize + ); + + // to_output_note() should also fail + let output_note = RawOutputNote::Full(oversized_note); + let result = output_note.to_output_note(); + + assert_matches!( + result, + Err(OutputNoteError::NoteSizeLimitExceeded { note_id: _, note_size }) + if note_size > NOTE_MAX_SIZE as usize + ); + + Ok(()) +} diff --git a/crates/miden-protocol/src/transaction/partial_blockchain.rs b/crates/miden-protocol/src/transaction/partial_blockchain.rs index a7b2cbf491..bb424c576b 100644 --- a/crates/miden-protocol/src/transaction/partial_blockchain.rs +++ b/crates/miden-protocol/src/transaction/partial_blockchain.rs @@ -281,7 +281,6 @@ impl Default for PartialBlockchain { #[cfg(test)] mod tests { use assert_matches::assert_matches; - use miden_core::utils::{Deserializable, Serializable}; use rand::SeedableRng; use rand_chacha::ChaCha20Rng; @@ -293,6 +292,7 @@ mod tests { use crate::crypto::merkle::mmr::{Mmr, PartialMmr}; use crate::errors::PartialBlockchainError; use crate::testing::account_id::ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn test_partial_blockchain_add() { @@ -353,7 +353,7 @@ mod tests { let mut partial_mmr = PartialMmr::from_peaks(mmr.peaks()); for i in 0..3 { partial_mmr - .track(i, mmr.get(i).unwrap(), &mmr.open(i).unwrap().merkle_path) + .track(i, mmr.get(i).unwrap(), mmr.open(i).unwrap().merkle_path()) .unwrap(); } @@ -403,7 +403,7 @@ mod tests { let mut partial_mmr = PartialMmr::from_peaks(mmr.peaks()); partial_mmr - 
.track(1, block_header1.commitment(), &mmr.open(1).unwrap().merkle_path) + .track(1, block_header1.commitment(), mmr.open(1).unwrap().merkle_path()) .unwrap(); let error = @@ -470,7 +470,7 @@ mod tests { for i in 0..total_blocks { let i: usize = i as usize; partial_mmr - .track(i, full_mmr.get(i).unwrap(), &full_mmr.open(i).unwrap().merkle_path) + .track(i, full_mmr.get(i).unwrap(), full_mmr.open(i).unwrap().merkle_path()) .unwrap(); } let mut chain = PartialBlockchain::new(partial_mmr, headers).unwrap(); diff --git a/crates/miden-protocol/src/transaction/proven_tx.rs b/crates/miden-protocol/src/transaction/proven_tx.rs index afe974bd3f..5775902cc7 100644 --- a/crates/miden-protocol/src/transaction/proven_tx.rs +++ b/crates/miden-protocol/src/transaction/proven_tx.rs @@ -71,6 +71,69 @@ pub struct ProvenTransaction { } impl ProvenTransaction { + // CONSTRUCTOR + // -------------------------------------------------------------------------------------------- + + /// Creates a new [ProvenTransaction] from the specified components. + /// + /// # Errors + /// + /// Returns an error if: + /// - The total number of input notes is greater than + /// [`MAX_INPUT_NOTES_PER_TX`](crate::constants::MAX_INPUT_NOTES_PER_TX). + /// - The vector of input notes contains duplicates. + /// - The total number of output notes is greater than + /// [`MAX_OUTPUT_NOTES_PER_TX`](crate::constants::MAX_OUTPUT_NOTES_PER_TX). + /// - The vector of output notes contains duplicates. + /// - The transaction is empty, which is the case if the account state is unchanged or the + /// number of input notes is zero. + /// - The commitment computed on the actual account delta contained in [`TxAccountUpdate`] does + /// not match its declared account delta commitment. 
+ pub fn new( + account_update: TxAccountUpdate, + input_notes: impl IntoIterator>, + output_notes: impl IntoIterator>, + ref_block_num: BlockNumber, + ref_block_commitment: Word, + fee: FungibleAsset, + expiration_block_num: BlockNumber, + proof: ExecutionProof, + ) -> Result { + let input_notes: Vec = + input_notes.into_iter().map(Into::into).collect(); + let output_notes: Vec = output_notes.into_iter().map(Into::into).collect(); + + let input_notes = + InputNotes::new(input_notes).map_err(ProvenTransactionError::InputNotesError)?; + let output_notes = + OutputNotes::new(output_notes).map_err(ProvenTransactionError::OutputNotesError)?; + + let id = TransactionId::new( + account_update.initial_state_commitment(), + account_update.final_state_commitment(), + input_notes.commitment(), + output_notes.commitment(), + fee, + ); + + let proven_transaction = Self { + id, + account_update, + input_notes, + output_notes, + ref_block_num, + ref_block_commitment, + fee, + expiration_block_num, + proof, + }; + + proven_transaction.validate() + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + /// Returns unique identifier of this transaction. pub fn id(&self) -> TransactionId { self.id @@ -218,6 +281,7 @@ impl Deserializable for ProvenTransaction { account_update.final_state_commitment(), input_notes.commitment(), output_notes.commitment(), + fee, ); let proven_transaction = Self { @@ -238,171 +302,6 @@ impl Deserializable for ProvenTransaction { } } -// PROVEN TRANSACTION BUILDER -// ================================================================================================ - -/// Builder for a proven transaction. -#[derive(Clone, Debug)] -pub struct ProvenTransactionBuilder { - /// ID of the account that the transaction was executed against. - account_id: AccountId, - - /// The commitment of the account before the transaction was executed. 
- initial_account_commitment: Word, - - /// The commitment of the account after the transaction was executed. - final_account_commitment: Word, - - /// The commitment of the account delta produced by the transaction. - account_delta_commitment: Word, - - /// State changes to the account due to the transaction. - account_update_details: AccountUpdateDetails, - - /// List of [InputNoteCommitment]s of all consumed notes by the transaction. - input_notes: Vec, - - /// List of [OutputNote]s of all notes created by the transaction. - output_notes: Vec, - - /// [`BlockNumber`] of the transaction's reference block. - ref_block_num: BlockNumber, - - /// Block digest of the transaction's reference block. - ref_block_commitment: Word, - - /// The fee of the transaction. - fee: FungibleAsset, - - /// The block number by which the transaction will expire, as defined by the executed scripts. - expiration_block_num: BlockNumber, - - /// A STARK proof that attests to the correct execution of the transaction. - proof: ExecutionProof, -} - -impl ProvenTransactionBuilder { - // CONSTRUCTOR - // -------------------------------------------------------------------------------------------- - - /// Returns a [ProvenTransactionBuilder] used to build a [ProvenTransaction]. 
- #[allow(clippy::too_many_arguments)] - pub fn new( - account_id: AccountId, - initial_account_commitment: Word, - final_account_commitment: Word, - account_delta_commitment: Word, - ref_block_num: BlockNumber, - ref_block_commitment: Word, - fee: FungibleAsset, - expiration_block_num: BlockNumber, - proof: ExecutionProof, - ) -> Self { - Self { - account_id, - initial_account_commitment, - final_account_commitment, - account_delta_commitment, - account_update_details: AccountUpdateDetails::Private, - input_notes: Vec::new(), - output_notes: Vec::new(), - ref_block_num, - ref_block_commitment, - fee, - expiration_block_num, - proof, - } - } - - // PUBLIC ACCESSORS - // -------------------------------------------------------------------------------------------- - - /// Sets the account's update details. - pub fn account_update_details(mut self, details: AccountUpdateDetails) -> Self { - self.account_update_details = details; - self - } - - /// Add notes consumed by the transaction. - pub fn add_input_notes(mut self, notes: I) -> Self - where - I: IntoIterator, - T: Into, - { - self.input_notes.extend(notes.into_iter().map(|note| note.into())); - self - } - - /// Add notes produced by the transaction. - pub fn add_output_notes(mut self, notes: T) -> Self - where - T: IntoIterator, - { - self.output_notes.extend(notes); - self - } - - /// Builds the [`ProvenTransaction`]. - /// - /// # Errors - /// - /// Returns an error if: - /// - The total number of input notes is greater than - /// [`MAX_INPUT_NOTES_PER_TX`](crate::constants::MAX_INPUT_NOTES_PER_TX). - /// - The vector of input notes contains duplicates. - /// - The total number of output notes is greater than - /// [`MAX_OUTPUT_NOTES_PER_TX`](crate::constants::MAX_OUTPUT_NOTES_PER_TX). - /// - The vector of output notes contains duplicates. - /// - The transaction is empty, which is the case if the account state is unchanged or the - /// number of input notes is zero. 
- /// - The commitment computed on the actual account delta contained in [`TxAccountUpdate`] does - /// not match its declared account delta commitment. - /// - The size of the serialized account update exceeds [`ACCOUNT_UPDATE_MAX_SIZE`]. - /// - The transaction was executed against a _new_ account with public state and its account ID - /// does not match the ID in the account update. - /// - The transaction was executed against a _new_ account with public state and its commitment - /// does not match the final state commitment of the account update. - /// - The transaction creates a _new_ account with public state and the update is of type - /// [`AccountUpdateDetails::Delta`] but the account delta is not a full state delta. - /// - The transaction was executed against a private account and the account update is _not_ of - /// type [`AccountUpdateDetails::Private`]. - /// - The transaction was executed against an account with public state and the update is of - /// type [`AccountUpdateDetails::Private`]. 
- pub fn build(self) -> Result { - let input_notes = - InputNotes::new(self.input_notes).map_err(ProvenTransactionError::InputNotesError)?; - let output_notes = OutputNotes::new(self.output_notes) - .map_err(ProvenTransactionError::OutputNotesError)?; - let id = TransactionId::new( - self.initial_account_commitment, - self.final_account_commitment, - input_notes.commitment(), - output_notes.commitment(), - ); - let account_update = TxAccountUpdate::new( - self.account_id, - self.initial_account_commitment, - self.final_account_commitment, - self.account_delta_commitment, - self.account_update_details, - )?; - - let proven_transaction = ProvenTransaction { - id, - account_update, - input_notes, - output_notes, - ref_block_num: self.ref_block_num, - ref_block_commitment: self.ref_block_commitment, - fee: self.fee, - expiration_block_num: self.expiration_block_num, - proof: self.proof, - }; - - proven_transaction.validate() - } -} - // TRANSACTION ACCOUNT UPDATE // ================================================================================================ @@ -599,6 +498,16 @@ pub struct InputNoteCommitment { } impl InputNoteCommitment { + /// Returns a new [InputNoteCommitment] instantiated from the provided nullifier and optional + /// note header. + /// + /// Note: this method does not validate that the provided nullifier and header are consistent + /// with each other (i.e., it does not check that the nullifier was derived from the note + /// referenced by the header). + pub fn from_parts_unchecked(nullifier: Nullifier, header: Option) -> Self { + Self { nullifier, header } + } + /// Returns the nullifier of the input note committed to by this commitment. 
pub fn nullifier(&self) -> Nullifier { self.nullifier @@ -684,11 +593,11 @@ impl Deserializable for InputNoteCommitment { #[cfg(test)] mod tests { use alloc::collections::BTreeMap; + use alloc::vec::Vec; use anyhow::Context; - use miden_core::utils::Deserializable; + use miden_crypto::rand::test_utils::rand_value; use miden_verifier::ExecutionProof; - use winter_rand_utils::rand_value; use super::ProvenTransaction; use crate::account::delta::AccountUpdateDetails; @@ -702,6 +611,7 @@ mod tests { AccountType, AccountVaultDelta, StorageMapDelta, + StorageMapKey, StorageSlotName, }; use crate::asset::FungibleAsset; @@ -713,8 +623,8 @@ mod tests { }; use crate::testing::add_component::AddComponent; use crate::testing::noop_auth_component::NoopAuthComponent; - use crate::transaction::{ProvenTransactionBuilder, TxAccountUpdate}; - use crate::utils::Serializable; + use crate::transaction::{InputNoteCommitment, OutputNote, TxAccountUpdate}; + use crate::utils::serde::{Deserializable, Serializable}; use crate::{ACCOUNT_UPDATE_MAX_SIZE, EMPTY_WORD, LexicographicWord, ONE, Word}; fn check_if_sync() {} @@ -767,7 +677,10 @@ mod tests { // 32 bytes in size. 
let required_entries = ACCOUNT_UPDATE_MAX_SIZE / (2 * 32); for _ in 0..required_entries { - map.insert(LexicographicWord::new(rand_value::()), rand_value::()); + map.insert( + LexicographicWord::new(StorageMapKey::from_raw(rand_value())), + rand_value::(), + ); } let storage_delta = StorageMapDelta::new(map); @@ -812,18 +725,25 @@ mod tests { let expiration_block_num = BlockNumber::from(2); let proof = ExecutionProof::new_dummy(); - let tx = ProvenTransactionBuilder::new( + let account_update = TxAccountUpdate::new( account_id, initial_account_commitment, final_account_commitment, account_delta_commitment, + AccountUpdateDetails::Private, + ) + .context("failed to build account update")?; + + let tx = ProvenTransaction::new( + account_update, + Vec::::new(), + Vec::::new(), ref_block_num, ref_block_commitment, FungibleAsset::mock(42).unwrap_fungible(), expiration_block_num, proof, ) - .build() .context("failed to build proven transaction")?; let deserialized = ProvenTransaction::read_from_bytes(&tx.to_bytes()).unwrap(); diff --git a/crates/miden-protocol/src/transaction/transaction_id.rs b/crates/miden-protocol/src/transaction/transaction_id.rs index e75f7662f7..ddf14c7532 100644 --- a/crates/miden-protocol/src/transaction/transaction_id.rs +++ b/crates/miden-protocol/src/transaction/transaction_id.rs @@ -4,6 +4,7 @@ use core::fmt::{Debug, Display}; use miden_protocol_macros::WordWrapper; use super::{Felt, Hasher, ProvenTransaction, WORD_SIZE, Word, ZERO}; +use crate::asset::{Asset, FungibleAsset}; use crate::utils::serde::{ ByteReader, ByteWriter, @@ -19,8 +20,13 @@ use crate::utils::serde::{ /// /// Transaction ID is computed as: /// -/// hash(init_account_commitment, final_account_commitment, input_notes_commitment, -/// output_notes_commitment) +/// hash( +/// INIT_ACCOUNT_COMMITMENT, +/// FINAL_ACCOUNT_COMMITMENT, +/// INPUT_NOTES_COMMITMENT, +/// OUTPUT_NOTES_COMMITMENT, +/// FEE_ASSET, +/// ) /// /// This achieves the following properties: /// - Transactions 
are identical if and only if they have the same ID. @@ -35,12 +41,14 @@ impl TransactionId { final_account_commitment: Word, input_notes_commitment: Word, output_notes_commitment: Word, + fee_asset: FungibleAsset, ) -> Self { - let mut elements = [ZERO; 4 * WORD_SIZE]; + let mut elements = [ZERO; 6 * WORD_SIZE]; elements[..4].copy_from_slice(init_account_commitment.as_elements()); elements[4..8].copy_from_slice(final_account_commitment.as_elements()); elements[8..12].copy_from_slice(input_notes_commitment.as_elements()); - elements[12..].copy_from_slice(output_notes_commitment.as_elements()); + elements[12..16].copy_from_slice(output_notes_commitment.as_elements()); + elements[16..].copy_from_slice(&Asset::from(fee_asset).as_elements()); Self(Hasher::hash_elements(&elements)) } } @@ -67,6 +75,7 @@ impl From<&ProvenTransaction> for TransactionId { tx.account_update().final_state_commitment(), tx.input_notes().commitment(), tx.output_notes().commitment(), + tx.fee(), ) } } diff --git a/crates/miden-protocol/src/transaction/tx_args.rs b/crates/miden-protocol/src/transaction/tx_args.rs index ea5978e18f..1e6657bfaf 100644 --- a/crates/miden-protocol/src/transaction/tx_args.rs +++ b/crates/miden-protocol/src/transaction/tx_args.rs @@ -2,8 +2,8 @@ use alloc::collections::BTreeMap; use alloc::sync::Arc; use alloc::vec::Vec; +use miden_core::mast::MastNodeExt; use miden_crypto::merkle::InnerNodeInfo; -use miden_processor::MastNodeExt; use super::{Felt, Hasher, Word}; use crate::account::auth::{PublicKeyCommitment, Signature}; @@ -361,10 +361,10 @@ impl Deserializable for TransactionScript { #[cfg(test)] mod tests { - use miden_core::AdviceMap; - use miden_core::utils::{Deserializable, Serializable}; + use miden_core::advice::AdviceMap; use crate::transaction::TransactionArgs; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn test_tx_args_serialization() { diff --git a/crates/miden-protocol/src/transaction/tx_header.rs 
b/crates/miden-protocol/src/transaction/tx_header.rs index 0b17e5b4ce..23a2721e88 100644 --- a/crates/miden-protocol/src/transaction/tx_header.rs +++ b/crates/miden-protocol/src/transaction/tx_header.rs @@ -1,7 +1,5 @@ use alloc::vec::Vec; -use miden_processor::DeserializationError; - use crate::Word; use crate::asset::FungibleAsset; use crate::note::NoteHeader; @@ -10,20 +8,26 @@ use crate::transaction::{ ExecutedTransaction, InputNoteCommitment, InputNotes, - OutputNote, - OutputNotes, ProvenTransaction, + RawOutputNotes, TransactionId, }; -use crate::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A transaction header derived from a /// [`ProvenTransaction`](crate::transaction::ProvenTransaction). /// -/// The header is essentially a direct copy of the transaction's commitments, in particular the -/// initial and final account state commitment as well as all nullifiers of consumed notes and all -/// note IDs of created notes. While account updates may be aggregated and notes may be erased as -/// part of batch and block building, the header retains the original transaction's data. +/// The header is essentially a direct copy of the transaction's public commitments, in particular +/// the initial and final account state commitment as well as all nullifiers of consumed notes and +/// all note IDs of created notes together with the fee asset. While account updates may be +/// aggregated and notes may be erased as part of batch and block building, the header retains the +/// original transaction's data. #[derive(Debug, Clone, PartialEq, Eq)] pub struct TransactionHeader { id: TransactionId, @@ -41,7 +45,8 @@ impl TransactionHeader { /// Constructs a new [`TransactionHeader`] from the provided parameters. /// - /// The [`TransactionId`] is computed from the provided parameters. 
+ /// The [`TransactionId`] is computed from the provided parameters, committing to the initial + /// and final account commitments, input and output note commitments, and the fee asset. /// /// The input notes and output notes must be in the same order as they appeared in the /// transaction that this header represents, otherwise an incorrect ID will be computed. @@ -57,13 +62,14 @@ impl TransactionHeader { fee: FungibleAsset, ) -> Self { let input_notes_commitment = input_notes.commitment(); - let output_notes_commitment = OutputNotes::compute_commitment(output_notes.iter()); + let output_notes_commitment = RawOutputNotes::compute_commitment(output_notes.iter()); let id = TransactionId::new( initial_state_commitment, final_state_commitment, input_notes_commitment, output_notes_commitment, + fee, ); Self { @@ -167,7 +173,7 @@ impl From<&ProvenTransaction> for TransactionHeader { tx.account_update().initial_state_commitment(), tx.account_update().final_state_commitment(), tx.input_notes().clone(), - tx.output_notes().iter().map(OutputNote::header).cloned().collect(), + tx.output_notes().iter().map(<&NoteHeader>::from).cloned().collect(), tx.fee(), ) } @@ -182,7 +188,7 @@ impl From<&ExecutedTransaction> for TransactionHeader { tx.initial_account().initial_commitment(), tx.final_account().to_commitment(), tx.input_notes().to_commitments(), - tx.output_notes().iter().map(OutputNote::header).cloned().collect(), + tx.output_notes().iter().map(|n| n.header().clone()).collect(), tx.fee(), ) } diff --git a/crates/miden-protocol/src/transaction/tx_summary.rs b/crates/miden-protocol/src/transaction/tx_summary.rs index b2fcf0c0af..f02342e456 100644 --- a/crates/miden-protocol/src/transaction/tx_summary.rs +++ b/crates/miden-protocol/src/transaction/tx_summary.rs @@ -2,8 +2,14 @@ use alloc::vec::Vec; use crate::account::AccountDelta; use crate::crypto::SequentialCommit; -use crate::transaction::{InputNote, InputNotes, OutputNotes}; -use crate::utils::{Deserializable, 
Serializable}; +use crate::transaction::{InputNote, InputNotes, RawOutputNotes}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Felt, Word}; /// The summary of the changes that result from executing a transaction. @@ -14,7 +20,7 @@ use crate::{Felt, Word}; pub struct TransactionSummary { account_delta: AccountDelta, input_notes: InputNotes, - output_notes: OutputNotes, + output_notes: RawOutputNotes, salt: Word, } @@ -26,7 +32,7 @@ impl TransactionSummary { pub fn new( account_delta: AccountDelta, input_notes: InputNotes, - output_notes: OutputNotes, + output_notes: RawOutputNotes, salt: Word, ) -> Self { Self { @@ -51,7 +57,7 @@ impl TransactionSummary { } /// Returns the output notes of this transaction summary. - pub fn output_notes(&self) -> &OutputNotes { + pub fn output_notes(&self) -> &RawOutputNotes { &self.output_notes } @@ -82,7 +88,7 @@ impl SequentialCommit for TransactionSummary { } impl Serializable for TransactionSummary { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { self.account_delta.write_into(target); self.input_notes.write_into(target); self.output_notes.write_into(target); @@ -91,9 +97,7 @@ impl Serializable for TransactionSummary { } impl Deserializable for TransactionSummary { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let account_delta = source.read()?; let input_notes = source.read()?; let output_notes = source.read()?; diff --git a/crates/miden-standards/asm/account_components/access/ownable2step.masm b/crates/miden-standards/asm/account_components/access/ownable2step.masm new file mode 100644 index 0000000000..0f7b7dd2bd --- /dev/null +++ b/crates/miden-standards/asm/account_components/access/ownable2step.masm @@ -0,0 +1,9 @@ +# The MASM code of the Ownable2Step Account Component. +# +# See the `Ownable2Step` Rust type's documentation for more details. 
+ +pub use ::miden::standards::access::ownable2step::get_owner +pub use ::miden::standards::access::ownable2step::get_nominated_owner +pub use ::miden::standards::access::ownable2step::transfer_ownership +pub use ::miden::standards::access::ownable2step::accept_ownership +pub use ::miden::standards::access::ownable2step::renounce_ownership diff --git a/crates/miden-standards/asm/account_components/auth/multisig.masm b/crates/miden-standards/asm/account_components/auth/multisig.masm index 3b10b36c66..5e698bc886 100644 --- a/crates/miden-standards/asm/account_components/auth/multisig.masm +++ b/crates/miden-standards/asm/account_components/auth/multisig.masm @@ -2,616 +2,27 @@ # # See the `AuthMultisig` Rust type's documentation for more details. -use miden::protocol::active_account -use miden::protocol::auth::AUTH_UNAUTHORIZED_EVENT -use miden::protocol::native_account -use miden::standards::auth -use miden::core::word +use miden::standards::auth::multisig -# Local Memory Addresses -const IS_SIGNER_FOUND_LOC=0 -const CURRENT_SIGNER_INDEX_LOC=1 +pub use multisig::update_signers_and_threshold +pub use multisig::get_threshold_and_num_approvers +pub use multisig::set_procedure_threshold +pub use multisig::get_signer_at +pub use multisig::is_signer -const NEW_NUM_OF_APPROVERS_LOC=0 -const INIT_NUM_OF_APPROVERS_LOC=1 - -const DEFAULT_THRESHOLD_LOC=0 - -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } - -# CONSTANTS -# ================================================================================================= - -# Storage Slots -# -# This authentication component uses named storage slots. 
-# - THRESHOLD_CONFIG_SLOT: -# [default_threshold, num_approvers, 0, 0] -# -# - APPROVER_PUBLIC_KEYS_SLOT (map): -# APPROVER_MAP_KEY => APPROVER_PUBLIC_KEY -# where APPROVER_MAP_KEY = [key_index, 0, 0, 0] -# -# - APPROVER_SCHEME_ID_SLOT (map): -# APPROVER_MAP_KEY => [scheme_id, 0, 0, 0] -# where APPROVER_MAP_KEY = [key_index, 0, 0, 0] -# -# - EXECUTED_TXS_SLOT (map): -# TRANSACTION_MESSAGE => [is_executed, 0, 0, 0] -# -# - PROC_THRESHOLD_ROOTS_SLOT (map): -# PROC_ROOT => [proc_threshold, 0, 0, 0] - - -# The slot in this component's storage layout where the default signature threshold and -# number of approvers are stored as: -# [default_threshold, num_approvers, 0, 0]. -# The threshold is guaranteed to be less than or equal to num_approvers. -const THRESHOLD_CONFIG_SLOT = word("miden::standards::auth::multisig::threshold_config") - -# The slot in this component's storage layout where the public keys map is stored. -# Map entries: [key_index, 0, 0, 0] => APPROVER_PUBLIC_KEY -const APPROVER_PUBLIC_KEYS_SLOT = word("miden::standards::auth::multisig::approver_public_keys") - -# The slot in this component's storage layout where the scheme id for the corresponding public keys map is stored. -# Map entries: [key_index, 0, 0, 0] => [scheme_id, 0, 0, 0] -const APPROVER_SCHEME_ID_SLOT = word("miden::standards::auth::multisig::approver_schemes") - -# The slot in this component's storage layout where executed transactions are stored. -# Map entries: transaction_message => [is_executed, 0, 0, 0] -const EXECUTED_TXS_SLOT = word("miden::standards::auth::multisig::executed_transactions") - -# The slot in this component's storage layout where procedure thresholds are stored. 
-# Map entries: PROC_ROOT => [proc_threshold, 0, 0, 0] -const PROC_THRESHOLD_ROOTS_SLOT = word("miden::standards::auth::multisig::procedure_thresholds") - -# Executed Transaction Flag Constant -const IS_EXECUTED_FLAG = [1, 0, 0, 0] - -# ERRORS -# ================================================================================================= - -const ERR_TX_ALREADY_EXECUTED = "failed to approve multisig transaction as it was already executed" - -const ERR_MALFORMED_MULTISIG_CONFIG = "number of approvers must be equal to or greater than threshold" - -const ERR_ZERO_IN_MULTISIG_CONFIG = "number of approvers or threshold must not be zero" - -const ERR_APPROVER_COUNTS_NOT_U32 = "initial and new number of approvers must be u32" - -const ERR_SIGNER_INDEX_NOT_U32 = "signer index must be u32" - -#! Check if transaction has already been executed and add it to executed transactions for replay protection. -#! -#! Inputs: [MSG] -#! Outputs: [] -#! -#! Panics if: -#! - the same transaction has already been executed -proc assert_new_tx(msg: BeWord) - push.IS_EXECUTED_FLAG - # => [[0, 0, 0, is_executed], MSG] - - swapw - # => [MSG, IS_EXECUTED_FLAG] - - push.EXECUTED_TXS_SLOT[0..2] - # => [txs_slot_prefix, txs_slot_suffix, MSG, IS_EXECUTED_FLAG] - - # Set the key value pair in the map to mark transaction as executed - exec.native_account::set_map_item - # => [[0, 0, 0, is_executed]] - - drop drop drop - # => [is_executed] - - assertz.err=ERR_TX_ALREADY_EXECUTED - # => [] -end - -#! Remove old approver public keys and the corresponding scheme ids -#! from the approver public key and scheme id mappings. -#! -#! This procedure cleans up the storage by removing public keys and signature scheme ids of approvers -#! that are no longer part of the multisig configuration. -#! -#! Inputs: [init_num_of_approvers, new_num_of_approvers] -#! Outputs: [] -#! -#! Where: -#! - init_num_of_approvers is the original number of approvers before the update -#! 
- new_num_of_approvers is the new number of approvers after the update -#! -#! Panics if: -#! - init_num_of_approvers is not a u32 value. -#! - new_num_of_approvers is not a u32 value. -proc cleanup_pubkey_and_scheme_id_mapping(init_num_of_approvers: u32, new_num_of_approvers: u32) - dup.1 dup.1 - u32assert2.err=ERR_APPROVER_COUNTS_NOT_U32 - u32lt - # => [should_loop, i = init_num_of_approvers, new_num_of_approvers] - - while.true - # => [i, new_num_of_approvers] - - sub.1 - # => [i-1, new_num_of_approvers] - - # clear scheme id at APPROVER_MAP_KEY(i-1) - dup exec.create_approver_map_key - # => [APPROVER_MAP_KEY, i-1, new_num_of_approvers] - - padw swapw - # => [APPROVER_MAP_KEY, EMPTY_WORD, i-1, new_num_of_approvers] - - push.APPROVER_SCHEME_ID_SLOT[0..2] - # => [scheme_id_slot_prefix, scheme_id_slot_suffix, APPROVER_MAP_KEY, EMPTY_WORD, i-1, new_num_of_approvers] - - exec.native_account::set_map_item - # => [OLD_VALUE, i-1, new_num_of_approvers] - - dropw - # => [i-1, new_num_of_approvers] - - # clear public key at APPROVER_MAP_KEY(i-1) - dup exec.create_approver_map_key - # => [APPROVER_MAP_KEY, i-1, new_num_of_approvers] - - padw swapw - # => [APPROVER_MAP_KEY, EMPTY_WORD, i-1, new_num_of_approvers] - - push.APPROVER_PUBLIC_KEYS_SLOT[0..2] - # => [pub_key_slot_prefix, pub_key_slot_suffix, APPROVER_MAP_KEY, EMPTY_WORD, i-1, new_num_of_approvers] - - exec.native_account::set_map_item - # => [OLD_VALUE, i-1, new_num_of_approvers] - - dropw - # => [i-1, new_num_of_approvers] - - dup.1 dup.1 - u32lt - # => [should_loop, i-1, new_num_of_approvers] - end - - drop drop -end - -#! Builds the storage map key for a signer index. -#! -#! Inputs: [key_index] -#! Outputs: [APPROVER_MAP_KEY] -proc create_approver_map_key(key_index: felt) -> BeWord - push.0.0.0 - # => [APPROVER_MAP_KEY] -end - -#! Update threshold config, add / remove approvers, -#! and update the approver scheme ids -#! -#! Inputs: -#! Operand stack: [MULTISIG_CONFIG_HASH, pad(12)] -#! Advice map: { -#! 
MULTISIG_CONFIG_HASH => -#! [ -#! CONFIG, -#! PUB_KEY_N, PUB_KEY_N-1, ..., PUB_KEY_0, -#! SCHEME_ID_N, SCHEME_ID_N-1, ..., SCHEME_ID_0 -#! ] -#! } -#! Outputs: -#! Operand stack: [] -#! -#! Where: -#! - MULTISIG_CONFIG_HASH is the hash of the threshold, -#! new public key vector, and the corresponding scheme identifiers -#! - MULTISIG_CONFIG is [threshold, num_approvers, 0, 0] -#! - PUB_KEY_i is the public key of the i-th signer -#! - SCHEME_ID_i is the signature scheme id of the i-th signer -#! -#! Locals: -#! 0: new_num_of_approvers -#! 1: init_num_of_approvers -#! -#! Invocation: call -@locals(2) -pub proc update_signers_and_threshold(multisig_config_hash: BeWord) - adv.push_mapval - # => [MULTISIG_CONFIG_HASH, pad(12)] - - adv_loadw - # => [MULTISIG_CONFIG, pad(12)] - - # store new_num_of_approvers for later - dup.2 loc_store.NEW_NUM_OF_APPROVERS_LOC - # => [MULTISIG_CONFIG, pad(12)] - - dup.3 dup.3 - # => [num_approvers, threshold, MULTISIG_CONFIG, pad(12)] - - # make sure that the threshold is smaller than the number of approvers - u32assert2.err=ERR_MALFORMED_MULTISIG_CONFIG - u32gt assertz.err=ERR_MALFORMED_MULTISIG_CONFIG - # => [MULTISIG_CONFIG, pad(12)] - - dup.3 dup.3 - # => [num_approvers, threshold, MULTISIG_CONFIG, pad(12)] - - # make sure that threshold or num_approvers are not zero - eq.0 assertz.err=ERR_ZERO_IN_MULTISIG_CONFIG - eq.0 assertz.err=ERR_ZERO_IN_MULTISIG_CONFIG - # => [MULTISIG_CONFIG, pad(12)] - - push.THRESHOLD_CONFIG_SLOT[0..2] - # => [config_slot_prefix, config_slot_suffix, MULTISIG_CONFIG, pad(12)] - - exec.native_account::set_item - # => [OLD_THRESHOLD_CONFIG, pad(12)] - - # store init_num_of_approvers for later - drop drop loc_store.INIT_NUM_OF_APPROVERS_LOC drop - # => [pad(12)] - - loc_load.NEW_NUM_OF_APPROVERS_LOC - # => [num_approvers] - - dup neq.0 - while.true - sub.1 - # => [i-1, pad(12)] - - dup exec.create_approver_map_key - # => [APPROVER_MAP_KEY, i-1, pad(12)] - - padw adv_loadw - # => [PUB_KEY, APPROVER_MAP_KEY, i-1, 
pad(12)] - - swapw - # => [APPROVER_MAP_KEY, PUB_KEY, i-1, pad(12)] - - push.APPROVER_PUBLIC_KEYS_SLOT[0..2] - # => [pub_key_slot_prefix, pub_key_slot_suffix, APPROVER_MAP_KEY, PUB_KEY, i-1, pad(12)] - - exec.native_account::set_map_item - # => [OLD_VALUE, i-1, pad(12)] - - # override OLD_VALUE with SCHEME_ID_WORD - adv_loadw - # => [SCHEME_ID_WORD, i-1, pad(12)] - - # validate the scheme id word is in a correct form - exec.auth::signature::assert_supported_scheme_word - # => [SCHEME_ID_WORD, i-1, pad(12)] - - dup.4 exec.create_approver_map_key - # => [APPROVER_MAP_KEY, SCHEME_ID_WORD, i-1, pad(12)] - - push.APPROVER_SCHEME_ID_SLOT[0..2] - # => [scheme_id_slot_prefix, scheme_id_slot_suffix, APPROVER_MAP_KEY, SCHEME_ID_WORD, i-1, pad(12)] - - exec.native_account::set_map_item - # => [OLD_VALUE, i-1, pad(12)] - - dropw - # => [i-1, pad(12)] - - dup neq.0 - # => [is_non_zero, i-1, pad(12)] - end - # => [pad(13)] - - drop - # => [pad(12)] - - # compare initial vs current multisig config - - # load init_num_of_approvers & new_num_of_approvers - loc_load.NEW_NUM_OF_APPROVERS_LOC loc_load.INIT_NUM_OF_APPROVERS_LOC - # => [init_num_of_approvers, new_num_of_approvers, pad(12)] - - exec.cleanup_pubkey_and_scheme_id_mapping - # => [pad(12)] -end - -# Computes the effective transaction threshold based on called procedures and per-procedure -# overrides stored in PROC_THRESHOLD_ROOTS_SLOT. Falls back to default_threshold if no -# overrides apply. -# -#! Inputs: [default_threshold] -#! Outputs: [transaction_threshold] -@locals(1) -proc compute_transaction_threshold(default_threshold: u32) -> u32 - # 1. initialize transaction_threshold = 0 - # 2. iterate through all account procedures - # a. check if the procedure was called during the transaction - # b. if called, get the override threshold of that procedure from the config map - # c. if proc_threshold > transaction_threshold, set transaction_threshold = proc_threshold - # 3. 
if transaction_threshold == 0 at the end, revert to using default_threshold - - # store default_threshold for later - loc_store.DEFAULT_THRESHOLD_LOC - # => [] - - # 1. initialize transaction_threshold = 0 - push.0 - # => [transaction_threshold] - - # get the number of account procedures - exec.active_account::get_num_procedures - # => [num_procedures, transaction_threshold] - - # 2. iterate through all account procedures - dup neq.0 - # => [should_continue, num_procedures, transaction_threshold] - while.true - sub.1 dup - # => [num_procedures-1, num_procedures-1, transaction_threshold] - - # get procedure root of the procedure with index i - exec.active_account::get_procedure_root dupw - # => [PROC_ROOT, PROC_ROOT, num_procedures-1, transaction_threshold] - - # 2a. check if this procedure has been called in the transaction - exec.native_account::was_procedure_called - # => [was_called, PROC_ROOT, num_procedures-1, transaction_threshold] - - # if it has been called, get the override threshold of that procedure - if.true - # => [PROC_ROOT, num_procedures-1, transaction_threshold] - - push.PROC_THRESHOLD_ROOTS_SLOT[0..2] - # => [proc_roots_slot_prefix, proc_roots_slot_suffix, PROC_ROOT, num_procedures-1, transaction_threshold] - - # 2b. get the override proc_threshold of that procedure - # if the procedure has no override threshold, the returned map item will be [0, 0, 0, 0] - exec.active_account::get_initial_map_item - # => [[0, 0, 0, proc_threshold], num_procedures-1, transaction_threshold] - - drop drop drop dup dup.3 - # => [transaction_threshold, proc_threshold, proc_threshold, num_procedures-1, transaction_threshold] - - u32assert2.err="transaction threshold or procedure threshold are not u32" - u32gt - # => [is_gt, proc_threshold, num_procedures-1, transaction_threshold] - # 2c. 
if proc_threshold > transaction_threshold, update transaction_threshold - movup.2 movdn.3 - # => [is_gt, proc_threshold, transaction_threshold, num_procedures-1] - cdrop - # => [updated_transaction_threshold, num_procedures-1] - swap - # => [num_procedures-1, updated_transaction_threshold] - # if it has not been called during this transaction, nothing to do, move to the next procedure - else - dropw - # => [num_procedures-1, transaction_threshold] - end - - dup neq.0 - # => [should_continue, num_procedures-1, transaction_threshold] - end - - drop - # => [transaction_threshold] - - loc_load.DEFAULT_THRESHOLD_LOC - # => [default_threshold, transaction_threshold] - - # 3. if transaction_threshold == 0 at the end, revert to using default_threshold - dup.1 eq.0 - # => [is_zero, default_threshold, transaction_threshold] - - cdrop - # => [effective_transaction_threshold] -end - -#! Returns current num_approvers and the threshold `THRESHOLD_CONFIG_SLOT` -#! Inputs: [] -#! Outputs: [threshold, num_approvers] -#! -#! Invocation: call -pub proc get_threshold_and_num_approvers - push.THRESHOLD_CONFIG_SLOT[0..2] - # => [THRESHOLD_CONFIG_SLOT] - - exec.active_account::get_initial_item - # => [0, 0, num_approvers, threshold] - - drop drop - # => [num_approvers, threshold] - - swap - # => [threshold, num_approvers] -end - -#! Returns signer public key at index i -#! -#! Inputs: [index] -#! Outputs: [PUB_KEY] -#! -#! Panics if: -#! - index is not a u32 value. -#! -#! Invocation: call -pub proc get_signer_at - u32assert.err=ERR_SIGNER_INDEX_NOT_U32 - # => [index] - - exec.create_approver_map_key - # => [APPROVER_MAP_KEY] - - push.APPROVER_PUBLIC_KEYS_SLOT[0..2] - # => [APPROVER_PUBLIC_KEYS_SLOT, APPROVER_MAP_KEY] - - exec.active_account::get_initial_map_item - # => [PUB_KEY] -end - - -#! Returns 1 if PUB_KEY is a current signer, else 0. -#! Inputs: [PUB_KEY] -#! Outputs: [is_signer] -#! Locals: -#! 0: is_signer_found -#! 1: current_signer_index -#! -#! 
Invocation: call -@locals(2) -pub proc is_signer(pub_key: BeWord) -> felt - # initialize is_signer_found = false - push.0 loc_store.IS_SIGNER_FOUND_LOC - # => [PUB_KEY] - - exec.get_threshold_and_num_approvers - # => [threshold, num_approvers, PUB_KEY] - - drop - # => [num_approvers, PUB_KEY] - - dup neq.0 - # => [has_remaining_signers, num_approvers, PUB_KEY] - - while.true - # => [i, PUB_KEY] - - sub.1 - # => [i-1, PUB_KEY] - - # store i-1 for this loop iteration before map lookup - dup loc_store.CURRENT_SIGNER_INDEX_LOC - # => [i-1, PUB_KEY] - - exec.create_approver_map_key - # => [APPROVER_MAP_KEY, PUB_KEY] - - push.APPROVER_PUBLIC_KEYS_SLOT[0..2] - # => [pub_key_slot_prefix, pub_key_slot_suffix, APPROVER_MAP_KEY, PUB_KEY] - - exec.active_account::get_initial_map_item - # => [CURRENT_PUB_KEY, PUB_KEY] - - dupw.1 exec.word::eq - # => [is_pub_key_match, PUB_KEY] - - loc_store.IS_SIGNER_FOUND_LOC - # => [PUB_KEY] - - loc_load.CURRENT_SIGNER_INDEX_LOC - # => [i-1, PUB_KEY] - - dup neq.0 - # => [has_remaining_signers, i-1, PUB_KEY] - - loc_load.IS_SIGNER_FOUND_LOC not - # => [!is_signer_found, has_remaining_signers, i-1, PUB_KEY] - - and - # => [should_loop, i-1, PUB_KEY] - end - - drop dropw - # => [] - - loc_load.IS_SIGNER_FOUND_LOC - # => [is_signer] -end - - -#! Authenticate a transaction using the signature scheme specified by scheme_id -#! with multi-signature support -#! -#! Supported schemes: -#! - 1 => ecdsa_k256_keccak -#! - 2 => falcon512_rpo -#! -#! This procedure implements multi-signature authentication by: -#! 1. Computing the transaction summary message that needs to be signed -#! 2. Verifying signatures from multiple required signers against their public keys -#! 3. Ensuring the minimum threshold of valid signatures is met -#! 4. Implementing replay protection by tracking executed transactions +#! Authenticate a transaction with multi-signature support. #! #! Inputs: #! Operand stack: [SALT] -#! Advice map: { -#! h(SIG_0, MSG): SIG_0, -#! 
h(SIG_1, MSG): SIG_1, -#! h(SIG_n, MSG): SIG_n -#! } #! Outputs: #! Operand stack: [] #! -#! Where: -#! - SALT is a cryptographically random nonce that enables multiple concurrent -#! multisig transactions while maintaining replay protection. Each transaction -#! must use a unique SALT value to ensure transaction uniqueness. -#! - SIG_i is the signature from the i-th signer. -#! - MSG is the transaction message being signed. -#! - h(SIG_i, MSG) is the hash of the signature and message used as the advice map key. -#! -#! Panics if: -#! - insufficient number of valid signatures (below threshold). -#! - the same transaction has already been executed (replay protection). -#! #! Invocation: call -@locals(1) -pub proc auth_tx_multisig(salt: BeWord) - exec.native_account::incr_nonce drop - # => [SALT] - - # ------ Computing transaction summary ------ - - exec.auth::create_tx_summary - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] - - # to build a tx_summary in the host, we need these four words in the advice provider - exec.auth::adv_insert_hqword - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] - - # the commitment to the tx summary is the message that is signed - exec.auth::hash_tx_summary +@auth_script +pub proc auth_tx_multisig(salt: word) + exec.multisig::auth_tx # => [TX_SUMMARY_COMMITMENT] - # ------ Verifying approver signatures ------ - - push.THRESHOLD_CONFIG_SLOT[0..2] - # => [config_slot_prefix, config_slot_suffix, TX_SUMMARY_COMMITMENT] - - exec.active_account::get_initial_item - # => [0, 0, num_of_approvers, default_threshold, TX_SUMMARY_COMMITMENT] - - drop drop - # => [num_of_approvers, default_threshold, TX_SUMMARY_COMMITMENT] - - swap movdn.5 - # => [num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] - - push.APPROVER_PUBLIC_KEYS_SLOT[0..2] - # => [pub_key_slot_prefix, pub_key_slot_suffix, num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] - - 
push.APPROVER_SCHEME_ID_SLOT[0..2] - # => [approver_scheme_id_slot_prefix, approver_scheme_id_slot_suffix, pub_key_slot_prefix, pub_key_slot_suffix, num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] - - exec.::miden::standards::auth::signature::verify_signatures - # => [num_verified_signatures, TX_SUMMARY_COMMITMENT, default_threshold] - - # ------ Checking threshold is >= num_verified_signatures ------ - - movup.5 - # => [default_threshold, num_verified_signatures, TX_SUMMARY_COMMITMENT] - - exec.compute_transaction_threshold - # => [transaction_threshold, num_verified_signatures, TX_SUMMARY_COMMITMENT] - - u32assert2 u32lt - # => [is_unauthorized, TX_SUMMARY_COMMITMENT] - - # If signatures are non-existent the tx will fail here. - if.true - emit.AUTH_UNAUTHORIZED_EVENT - push.0 assert.err="insufficient number of signatures" - end - - # ------ Writing executed transaction MSG to map ------ - - exec.assert_new_tx - # => [TX_SUMMARY_COMMITMENT] + exec.multisig::assert_new_tx + # => [] end diff --git a/crates/miden-standards/asm/account_components/auth/multisig_psm.masm b/crates/miden-standards/asm/account_components/auth/multisig_psm.masm new file mode 100644 index 0000000000..591ba376ab --- /dev/null +++ b/crates/miden-standards/asm/account_components/auth/multisig_psm.masm @@ -0,0 +1,37 @@ +# The MASM code of the Multi-Signature Authentication Component with Private State Manager. +# +# See the `AuthMultisigPsm` Rust type's documentation for more details. + +use miden::standards::auth::multisig +use miden::standards::auth::psm + +pub use multisig::update_signers_and_threshold +pub use multisig::get_threshold_and_num_approvers +pub use multisig::set_procedure_threshold +pub use multisig::get_signer_at +pub use multisig::is_signer + +pub use psm::update_psm_public_key + +#! Authenticate a transaction with multi-signature support and optional PSM verification. +#! +#! Inputs: +#! Operand stack: [SALT] +#! Outputs: +#! Operand stack: [] +#! +#! 
Invocation: call +@auth_script +pub proc auth_tx_multisig_psm(salt: word) + exec.multisig::auth_tx + # => [TX_SUMMARY_COMMITMENT] + + dupw + # => [TX_SUMMARY_COMMITMENT, TX_SUMMARY_COMMITMENT] + + exec.psm::verify_signature + # => [TX_SUMMARY_COMMITMENT] + + exec.multisig::assert_new_tx + # => [] +end diff --git a/crates/miden-standards/asm/account_components/auth/no_auth.masm b/crates/miden-standards/asm/account_components/auth/no_auth.masm index 4ced08325f..d0b2c86f26 100644 --- a/crates/miden-standards/asm/account_components/auth/no_auth.masm +++ b/crates/miden-standards/asm/account_components/auth/no_auth.masm @@ -12,6 +12,7 @@ use miden::core::word #! #! Inputs: [pad(16)] #! Outputs: [pad(16)] +@auth_script pub proc auth_no_auth # check if the account state has changed by comparing initial and final commitments diff --git a/crates/miden-standards/asm/account_components/auth/singlesig.masm b/crates/miden-standards/asm/account_components/auth/singlesig.masm index f16edb85c9..ab9b587f48 100644 --- a/crates/miden-standards/asm/account_components/auth/singlesig.masm +++ b/crates/miden-standards/asm/account_components/auth/singlesig.masm @@ -5,8 +5,6 @@ use miden::standards::auth::signature use miden::protocol::active_account -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } - # CONSTANTS # ================================================================================================= @@ -20,7 +18,7 @@ const SCHEME_ID_SLOT = word("miden::standards::auth::singlesig::scheme") #! #! Supported schemes: #! - 1 => ecdsa_k256_keccak -#! - 2 => falcon512_rpo +#! - 2 => falcon512_poseidon2 #! #! It first increments the nonce of the account, independent of whether the account's state has #! changed or not. Then it computes and signs the following message (in memory order): @@ -35,10 +33,11 @@ const SCHEME_ID_SLOT = word("miden::standards::auth::singlesig::scheme") #! Outputs: [pad(16)] #! #! 
Invocation: call -pub proc auth_tx(auth_args: BeWord) +@auth_script +pub proc auth_tx(auth_args: word) dropw # => [pad(16)] - + # Fetch public key from storage. # --------------------------------------------------------------------------------------------- @@ -46,9 +45,9 @@ pub proc auth_tx(auth_args: BeWord) # => [PUB_KEY, pad(16)] push.SCHEME_ID_SLOT[0..2] exec.active_account::get_item - # => [0, 0, 0, scheme_id, PUB_KEY, pad(16)] + # => [scheme_id, 0, 0, 0, PUB_KEY, pad(16)] - drop drop drop movdn.4 + movdn.7 drop drop drop # => [PUB_KEY, scheme_id, pad(16)] exec.signature::authenticate_transaction diff --git a/crates/miden-standards/asm/account_components/auth/singlesig_acl.masm b/crates/miden-standards/asm/account_components/auth/singlesig_acl.masm index 701c5036d7..b3484554a5 100644 --- a/crates/miden-standards/asm/account_components/auth/singlesig_acl.masm +++ b/crates/miden-standards/asm/account_components/auth/singlesig_acl.masm @@ -8,8 +8,6 @@ use miden::protocol::tx use miden::standards::auth::signature use miden::core::word -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } - # CONSTANTS # ================================================================================================ @@ -25,12 +23,15 @@ const AUTH_CONFIG_SLOT = word("miden::standards::auth::singlesig_acl::config") # The slot where the map of auth trigger procedure roots is stored. const AUTH_TRIGGER_PROCS_MAP_SLOT = word("miden::standards::auth::singlesig_acl::trigger_procedure_roots") +const ALLOW_UNAUTHORIZED_OUTPUT_NOTES_LOC = 0 +const ALLOW_UNAUTHORIZED_INPUT_NOTES_LOC = 1 + #! Authenticate a transaction using the signature scheme specified by scheme_id #! based on procedure calls and note usage. #! #! Supported schemes: #! - 1 => ecdsa_k256_keccak -#! - 2 => falcon512_rpo +#! - 2 => falcon512_poseidon2 #! #! This authentication procedure checks: #! 1. 
If any of the trigger procedures were called during the transaction @@ -44,19 +45,21 @@ const AUTH_TRIGGER_PROCS_MAP_SLOT = word("miden::standards::auth::singlesig_acl: #! Outputs: [pad(16)] #! #! Invocation: call +@auth_script @locals(2) -pub proc auth_tx_acl(auth_args: BeWord) +pub proc auth_tx_acl(auth_args: word) dropw # => [pad(16)] # Get the authentication configuration push.AUTH_CONFIG_SLOT[0..2] exec.active_account::get_item - # => [0, allow_unauthorized_input_notes, allow_unauthorized_output_notes, num_auth_trigger_procs, pad(16)] + # => [num_auth_trigger_procs, allow_unauthorized_output_notes, allow_unauthorized_input_notes, 0, pad(16)] - drop - # => [allow_unauthorized_input_notes, allow_unauthorized_output_notes, num_auth_trigger_procs, pad(16)] + movup.3 drop + # => [num_auth_trigger_procs, allow_unauthorized_output_notes, allow_unauthorized_input_notes, pad(16)] - loc_store.1 loc_store.0 + swap loc_store.ALLOW_UNAUTHORIZED_OUTPUT_NOTES_LOC + swap loc_store.ALLOW_UNAUTHORIZED_INPUT_NOTES_LOC # => [num_auth_trigger_procs, pad(16)] # ------ Check if any trigger procedure was called ------ @@ -72,8 +75,8 @@ pub proc auth_tx_acl(auth_args: BeWord) # => [require_acl_auth, i, pad(16)] # Get the procedure root from storage - dup.1 sub.1 push.0.0.0 push.AUTH_TRIGGER_PROCS_MAP_SLOT[0..2] - # => [trigger_proc_slot_prefix, trigger_proc_slot_suffix, [0, 0, 0, i-1], require_acl_auth, i, pad(16)] + push.0.0.0 dup.4 sub.1 push.AUTH_TRIGGER_PROCS_MAP_SLOT[0..2] + # => [trigger_proc_slot_prefix, trigger_proc_slot_suffix, [i-1, 0, 0, 0], require_acl_auth, i, pad(16)] exec.active_account::get_map_item # => [AUTH_TRIGGER_PROC_ROOT, require_acl_auth, i, pad(16)] @@ -105,7 +108,7 @@ pub proc auth_tx_acl(auth_args: BeWord) neq.0 # => [were_output_notes_created, require_acl_auth, pad(16)] - loc_load.0 not + loc_load.ALLOW_UNAUTHORIZED_OUTPUT_NOTES_LOC not # => [!allow_unauthorized_output_notes, were_output_notes_created, require_acl_auth, pad(16)] and @@ -122,7 +125,7 @@ pub 
proc auth_tx_acl(auth_args: BeWord) neq.0 # => [were_input_notes_consumed, auth_required, pad(16)] - loc_load.1 not + loc_load.ALLOW_UNAUTHORIZED_INPUT_NOTES_LOC not # => [!allow_unauthorized_input_notes, were_input_notes_consumed, auth_required, pad(16)] and @@ -139,9 +142,9 @@ pub proc auth_tx_acl(auth_args: BeWord) # Fetch scheme_id from storage push.SCHEME_ID_SLOT[0..2] exec.active_account::get_item - # => [0, 0, 0, scheme_id, PUB_KEY, pad(16)] + # => [[scheme_id, 0, 0, 0], PUB_KEY, pad(16)] - drop drop drop movdn.4 + movdn.7 drop drop drop # => [PUB_KEY, scheme_id, pad(16)] exec.signature::authenticate_transaction diff --git a/crates/miden-standards/asm/account_components/faucets/basic_fungible_faucet.masm b/crates/miden-standards/asm/account_components/faucets/basic_fungible_faucet.masm index 5d3b13a920..37895b9ef7 100644 --- a/crates/miden-standards/asm/account_components/faucets/basic_fungible_faucet.masm +++ b/crates/miden-standards/asm/account_components/faucets/basic_fungible_faucet.masm @@ -2,5 +2,5 @@ # # See the `BasicFungibleFaucet` Rust type's documentation for more details. -pub use ::miden::standards::faucets::basic_fungible::distribute +pub use ::miden::standards::faucets::basic_fungible::mint_and_send pub use ::miden::standards::faucets::basic_fungible::burn diff --git a/crates/miden-standards/asm/account_components/faucets/network_fungible_faucet.masm b/crates/miden-standards/asm/account_components/faucets/network_fungible_faucet.masm index 7d350a4224..0aee492d62 100644 --- a/crates/miden-standards/asm/account_components/faucets/network_fungible_faucet.masm +++ b/crates/miden-standards/asm/account_components/faucets/network_fungible_faucet.masm @@ -2,7 +2,5 @@ # # See the `NetworkFungibleFaucet` Rust type's documentation for more details. 
-pub use ::miden::standards::faucets::network_fungible::distribute +pub use ::miden::standards::faucets::network_fungible::mint_and_send pub use ::miden::standards::faucets::network_fungible::burn -pub use ::miden::standards::faucets::network_fungible::transfer_ownership -pub use ::miden::standards::faucets::network_fungible::renounce_ownership diff --git a/crates/miden-standards/asm/account_components/mint_policies/auth_controlled.masm b/crates/miden-standards/asm/account_components/mint_policies/auth_controlled.masm new file mode 100644 index 0000000000..8f817b74ff --- /dev/null +++ b/crates/miden-standards/asm/account_components/mint_policies/auth_controlled.masm @@ -0,0 +1,7 @@ +# The MASM code of the Mint Policy Auth Controlled Account Component. +# +# See the `AuthControlled` Rust type's documentation for more details. + +pub use ::miden::standards::mint_policies::auth_controlled::allow_all +pub use ::miden::standards::mint_policies::policy_manager::set_mint_policy +pub use ::miden::standards::mint_policies::policy_manager::get_mint_policy diff --git a/crates/miden-standards/asm/account_components/mint_policies/owner_controlled.masm b/crates/miden-standards/asm/account_components/mint_policies/owner_controlled.masm new file mode 100644 index 0000000000..cc21f8f0de --- /dev/null +++ b/crates/miden-standards/asm/account_components/mint_policies/owner_controlled.masm @@ -0,0 +1,7 @@ +# The MASM code of the Mint Policy Owner Controlled Account Component. +# +# See the `OwnerControlled` Rust type's documentation for more details. 
+ +pub use ::miden::standards::mint_policies::owner_controlled::owner_only +pub use ::miden::standards::mint_policies::policy_manager::set_mint_policy +pub use ::miden::standards::mint_policies::policy_manager::get_mint_policy diff --git a/crates/miden-standards/asm/standards/access/ownable.masm b/crates/miden-standards/asm/standards/access/ownable.masm index 79702c1945..b0591e71a5 100644 --- a/crates/miden-standards/asm/standards/access/ownable.masm +++ b/crates/miden-standards/asm/standards/access/ownable.masm @@ -15,7 +15,7 @@ use miden::protocol::native_account const OWNER_CONFIG_SLOT = word("miden::standards::access::ownable::owner_config") # ZERO_ADDRESS word (all zeros) used to represent no owner -# Format: [prefix=0, suffix=0, 0, 0] as stored in account storage +# Layout: [suffix=0, prefix=0, 0, 0] as stored in account storage const ZERO_ADDRESS = [0, 0, 0, 0] # ERRORS @@ -29,45 +29,32 @@ const ERR_SENDER_NOT_OWNER = "note sender is not the owner" #! Returns the owner AccountId from storage. #! #! Inputs: [] -#! Outputs: [owner_prefix, owner_suffix] +#! Outputs: [owner_suffix, owner_prefix] #! #! Where: -#! - owner_{prefix, suffix} are the prefix and suffix felts of the owner AccountId. +#! - owner_{suffix, prefix} are the suffix and prefix felts of the owner AccountId. 
proc owner push.OWNER_CONFIG_SLOT[0..2] exec.active_account::get_item - # => [owner_prefix, owner_suffix, 0, 0] - - # Storage format in memory: [0, 0, suffix, prefix] (word[0], word[1], word[2], word[3]) - # mem_loadw_be loads big-endian (reversed), so stack gets: [prefix, suffix, 0, 0] - # Stack: [owner_prefix (pos 0), owner_suffix (pos 1), 0 (pos 2), 0 (pos 3)] - # We want: [owner_prefix, owner_suffix] - # Move zeros to top using movup, then drop them - movup.2 - # => [0, owner_prefix, owner_suffix, 0] (moves element at pos 2 to pos 0) - - movup.3 - # => [0, 0, owner_prefix, owner_suffix] (moves element at pos 3 to pos 0) + # => [0, 0, owner_suffix, owner_prefix] drop drop - # => [owner_prefix, owner_suffix] + # => [owner_suffix, owner_prefix] end #! Checks if the given account ID is the owner of this component. #! -#! Inputs: [account_id_prefix, account_id_suffix] +#! Inputs: [account_id_suffix, account_id_prefix] #! Outputs: [is_owner] #! #! Where: -#! - account_id_{prefix, suffix} are the prefix and suffix felts of the AccountId to check. +#! - account_id_{suffix, prefix} are the suffix and prefix felts of the AccountId to check. #! - is_owner is 1 if the account is the owner, 0 otherwise. proc is_owner - exec.owner - # => [owner_prefix, owner_suffix, account_id_prefix, account_id_suffix] + # => [owner_suffix, owner_prefix, account_id_suffix, account_id_prefix] exec.account_id::is_equal # => [is_owner] - end # PUBLIC INTERFACE @@ -82,7 +69,7 @@ end #! - the note sender is not the owner. pub proc verify_owner exec.active_note::get_sender - # => [sender_prefix, sender_suffix] + # => [sender_suffix, sender_prefix] exec.is_owner # => [is_owner] @@ -94,26 +81,26 @@ end #! Returns the owner AccountId. #! #! Inputs: [pad(16)] -#! Outputs: [owner_prefix, owner_suffix, pad(14)] +#! Outputs: [owner_suffix, owner_prefix, pad(14)] #! #! Where: -#! - owner_{prefix, suffix} are the prefix and suffix felts of the owner AccountId. +#! 
- owner_{suffix, prefix} are the suffix and prefix felts of the owner AccountId. #! #! Invocation: call pub proc get_owner exec.owner - # => [owner_prefix, owner_suffix, pad(14)] + # => [owner_suffix, owner_prefix, pad(14)] end #! Transfers ownership to a new account. #! #! Can only be called by the current owner. #! -#! Inputs: [new_owner_prefix, new_owner_suffix, pad(14)] +#! Inputs: [new_owner_suffix, new_owner_prefix, pad(14)] #! Outputs: [pad(16)] #! #! Where: -#! - new_owner_{prefix, suffix} are the prefix and suffix felts of the new owner AccountId. +#! - new_owner_{suffix, prefix} are the suffix and prefix felts of the new owner AccountId. #! #! Panics if: #! - the note sender is not the owner. @@ -122,13 +109,13 @@ end pub proc transfer_ownership # Check that the caller is the owner exec.verify_owner - # => [new_owner_prefix, new_owner_suffix, pad(14)] + # => [new_owner_suffix, new_owner_prefix, pad(14)] - push.0 movdn.2 push.0 movdn.2 - # => [new_owner_prefix, new_owner_suffix, 0, 0, pad(14)] + push.0.0 + # => [0, 0, new_owner_suffix, new_owner_prefix, pad(14)] push.OWNER_CONFIG_SLOT[0..2] - # => [slot_prefix, slot_suffix, new_owner_prefix, new_owner_suffix, 0, 0, pad(14)] + # => [slot_suffix, slot_prefix, 0, 0, new_owner_suffix, new_owner_prefix, pad(14)] exec.native_account::set_item # => [OLD_OWNER_WORD, pad(14)] @@ -164,7 +151,7 @@ pub proc renounce_ownership # => [0, 0, 0, 0, pad(16)] push.OWNER_CONFIG_SLOT[0..2] - # => [slot_prefix, slot_suffix, 0, 0, 0, 0, pad(16)] + # => [slot_suffix, slot_prefix, 0, 0, 0, 0, pad(16)] exec.native_account::set_item # => [OLD_OWNER_WORD, pad(16)] diff --git a/crates/miden-standards/asm/standards/access/ownable2step.masm b/crates/miden-standards/asm/standards/access/ownable2step.masm new file mode 100644 index 0000000000..d4b7bcffbd --- /dev/null +++ b/crates/miden-standards/asm/standards/access/ownable2step.masm @@ -0,0 +1,369 @@ +# miden::standards::access::ownable2step +# +# Provides two-step ownership management 
functionality for account components. +# This module can be imported and used by any component that needs owner controls. +# +# Unlike a single-step ownership transfer, this module requires the new owner to explicitly +# accept the transfer before it takes effect. This prevents accidental transfers to incorrect +# addresses, which would permanently lock the component. +# +# The transfer flow is: +# 1. The current owner calls `transfer_ownership` to nominate a new owner. +# 2. The nominated account calls `accept_ownership` to complete the transfer. +# 3. Optionally, the current owner can call `transfer_ownership` with their own address +# to cancel the nominated transfer. +# +# Storage layout (single slot): +# Word: [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] +# word[0] word[1] word[2] word[3] + +use miden::protocol::active_account +use miden::protocol::account_id +use miden::protocol::active_note +use miden::protocol::native_account + +# CONSTANTS +# ================================================================================================ + +# Ownership config value representing renounced ownership (all zeros). +const RENOUNCED_OWNERSHIP_CONFIG = [0, 0, 0, 0] + +# The slot in this component's storage layout where the owner configuration is stored. +# Contains both the current owner and the nominated owner in a single word. 
+const OWNER_CONFIG_SLOT = word("miden::standards::access::ownable2step::owner_config") + +# ERRORS +# ================================================================================================ + +const ERR_SENDER_NOT_OWNER = "note sender is not the owner" +const ERR_SENDER_NOT_NOMINATED_OWNER = "note sender is not the nominated owner" +const ERR_NO_NOMINATED_OWNER = "no nominated ownership transfer exists" + +# LOCAL MEMORY ADDRESSES +# ================================================================================================ + +# transfer_ownership locals +const NEW_OWNER_SUFFIX_LOC = 0 +const NEW_OWNER_PREFIX_LOC = 1 +const OWNER_SUFFIX_LOC = 2 +const OWNER_PREFIX_LOC = 3 + +# INTERNAL PROCEDURES +# ================================================================================================ + +#! Returns the full ownership word from storage. +#! +#! Inputs: [] +#! Outputs: [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] +#! +#! Where: +#! - owner_{suffix, prefix} are the suffix and prefix felts of the current owner account ID. +#! - nominated_owner_{suffix, prefix} are the suffix and prefix felts of the nominated +#! owner account ID. +proc load_ownership_info + push.OWNER_CONFIG_SLOT[0..2] exec.active_account::get_item + # => [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] +end + +#! Writes the ownership word to storage and drops the old value. +#! +#! Inputs: [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] +#! Outputs: [] +proc save_ownership_info + push.OWNER_CONFIG_SLOT[0..2] + # => [slot_suffix, slot_prefix, owner_suffix, owner_prefix, + # nominated_owner_suffix, nominated_owner_prefix] + + exec.native_account::set_item + # => [OLD_OWNERSHIP_WORD] + + dropw + # => [] +end + +#! Returns the owner account ID from storage. +#! +#! Inputs: [] +#! Outputs: [owner_suffix, owner_prefix] +#! +#! Where: +#! 
- owner_{suffix, prefix} are the suffix and prefix felts of the owner account ID. +proc get_owner_internal + exec.load_ownership_info + # => [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] + + movup.2 drop + # => [owner_suffix, owner_prefix, nominated_owner_prefix] + + movup.2 drop + # => [owner_suffix, owner_prefix] +end + +#! Returns the nominated owner account ID from storage. +#! +#! Inputs: [] +#! Outputs: [nominated_owner_suffix, nominated_owner_prefix] +#! +#! Where: +#! - nominated_owner_{suffix, prefix} are the suffix and prefix felts of the nominated +#! owner account ID. +proc get_nominated_owner_internal + exec.load_ownership_info + # => [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] + + drop drop + # => [nominated_owner_suffix, nominated_owner_prefix] +end + +#! Checks if the given account ID is the owner of this component. +#! +#! Inputs: [account_id_suffix, account_id_prefix] +#! Outputs: [is_owner] +#! +#! Where: +#! - is_owner is 1 if the account is the owner, 0 otherwise. +proc is_owner_internal + exec.get_owner_internal + # => [owner_suffix, owner_prefix, account_id_suffix, account_id_prefix] + + exec.account_id::is_equal + # => [is_owner] +end + +#! Checks if the given account ID is the nominated owner of this component. +#! +#! Inputs: [account_id_suffix, account_id_prefix] +#! Outputs: [is_nominated_owner] +#! +#! Where: +#! - account_id_{suffix, prefix} are the suffix and prefix felts of the account ID to check. +#! - is_nominated_owner is 1 if the account is the nominated owner, 0 otherwise. +proc is_nominated_owner_internal + exec.get_nominated_owner_internal + # => [nominated_owner_suffix, nominated_owner_prefix, account_id_suffix, account_id_prefix] + + exec.account_id::is_equal + # => [is_nominated_owner] +end + +#! Checks if the note sender is the owner and panics if not. +#! +#! Inputs: [] +#! Outputs: [] +#! +#! Panics if: +#! - the note sender is not the owner. +#! +#! 
Invocation: exec +proc assert_sender_is_owner_internal + exec.active_note::get_sender + # => [sender_suffix, sender_prefix] + + exec.is_owner_internal + # => [is_owner] + + assert.err=ERR_SENDER_NOT_OWNER + # => [] +end + +# PUBLIC INTERFACE +# ================================================================================================ + +#! Checks if the note sender is the owner and panics if not. +#! +#! Inputs: [pad(16)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! - the note sender is not the owner. +#! +#! Invocation: call +pub proc assert_sender_is_owner + exec.assert_sender_is_owner_internal + # => [pad(16)] +end + +#! Returns the owner account ID. +#! +#! Inputs: [pad(16)] +#! Outputs: [owner_suffix, owner_prefix, pad(14)] +#! +#! Where: +#! - owner_{suffix, prefix} are the suffix and prefix felts of the owner account ID. +#! +#! Invocation: call +pub proc get_owner + exec.get_owner_internal + # => [owner_suffix, owner_prefix, pad(16)] + + movup.2 drop movup.2 drop + # => [owner_suffix, owner_prefix, pad(14)] +end + +#! Returns the nominated owner account ID. +#! +#! Inputs: [pad(16)] +#! Outputs: [nominated_owner_suffix, nominated_owner_prefix, pad(14)] +#! +#! Where: +#! - nominated_owner_{suffix, prefix} are the suffix and prefix felts of the nominated +#! owner account ID. Both are zero if no nominated transfer exists. +#! +#! Invocation: call +pub proc get_nominated_owner + exec.get_nominated_owner_internal + # => [nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + movup.2 drop movup.2 drop + # => [nominated_owner_suffix, nominated_owner_prefix, pad(14)] +end + +#! Initiates a two-step ownership transfer by setting the nominated owner. +#! +#! The current owner remains in control until the nominated owner calls `accept_ownership`. +#! Can only be called by the current owner. +#! +#! If the new owner is the current owner, any nominated transfer is cancelled and the +#! nominated owner field is cleared. +#! +#! 
Inputs: [new_owner_suffix, new_owner_prefix, pad(14)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! - the note sender is not the owner. +#! +#! Locals: +#! 0: new_owner_suffix +#! 1: new_owner_prefix +#! 2: owner_suffix +#! 3: owner_prefix +#! +#! Invocation: call +@locals(4) +pub proc transfer_ownership + exec.assert_sender_is_owner_internal + # => [new_owner_suffix, new_owner_prefix, pad(14)] + + dup.1 dup.1 exec.account_id::validate + # => [new_owner_suffix, new_owner_prefix, pad(14)] + + loc_store.NEW_OWNER_SUFFIX_LOC + # => [new_owner_prefix, pad(14)] + + loc_store.NEW_OWNER_PREFIX_LOC + # => [pad(14)] + + exec.get_owner_internal + # => [owner_suffix, owner_prefix, pad(14)] + + loc_store.OWNER_SUFFIX_LOC + # => [owner_prefix, pad(13)] + + loc_store.OWNER_PREFIX_LOC + # => [pad(12)] + + # Check if new_owner == owner (cancel case). + loc_load.NEW_OWNER_PREFIX_LOC loc_load.NEW_OWNER_SUFFIX_LOC + # => [new_owner_suffix, new_owner_prefix, pad(12)] + + loc_load.OWNER_PREFIX_LOC loc_load.OWNER_SUFFIX_LOC + # => [owner_suffix, owner_prefix, new_owner_suffix, new_owner_prefix, pad(12)] + + exec.account_id::is_equal + # => [is_self_transfer, pad(12)] + + if.true + # Cancel ownership transfer and clear nominated owner. + # Stack for save: [owner_suffix, owner_prefix, nominated_suffix=0, nominated_prefix=0] + loc_load.OWNER_PREFIX_LOC loc_load.OWNER_SUFFIX_LOC + # => [owner_suffix, owner_prefix, pad(12)] + + push.0.0 movup.3 movup.3 + # => [owner_suffix, owner_prefix, 0, 0, pad(12)] + else + # Transfer ownership by setting nominated = new_owner. + # Stack for save: [owner_suffix, owner_prefix, new_owner_suffix, new_owner_prefix] + loc_load.NEW_OWNER_PREFIX_LOC loc_load.NEW_OWNER_SUFFIX_LOC + # => [new_owner_suffix, new_owner_prefix, pad(12)] + + loc_load.OWNER_PREFIX_LOC loc_load.OWNER_SUFFIX_LOC + # => [owner_suffix, owner_prefix, new_owner_suffix, new_owner_prefix, pad(12)] + end + + exec.save_ownership_info + # => [pad(12)] +end + +#! 
Accepts a nominated ownership transfer. The nominated owner becomes the new owner +#! and the nominated owner field is cleared. +#! +#! Can only be called by the nominated owner. +#! +#! Inputs: [pad(16)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! - there is no nominated ownership transfer (nominated owner is zero). +#! - the note sender is not the nominated owner. +#! +#! Invocation: call +pub proc accept_ownership + exec.get_nominated_owner_internal + # => [nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + # Check that a nominated transfer exists (nominated owner is not zero). + dup.1 eq.0 dup.1 eq.0 and + # => [is_zero, nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + assertz.err=ERR_NO_NOMINATED_OWNER + # => [nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + exec.active_note::get_sender + # => [sender_suffix, sender_prefix, nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + dup.3 dup.3 + exec.account_id::is_equal + # => [is_sender_nominated_owner, nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + assert.err=ERR_SENDER_NOT_NOMINATED_OWNER + # => [nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + # Build new ownership word: nominated becomes owner, clear nominated. + # Stack for save: [owner_suffix, owner_prefix, nominated_suffix=0, nominated_prefix=0] + push.0.0 + # => [0, 0, nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + # Reorder: move nominated (now new owner) to owner position + movup.3 movup.3 + # => [nominated_owner_suffix, nominated_owner_prefix, 0, 0, pad(16)] + + exec.save_ownership_info + # => [pad(16)] +end + +#! Renounces ownership, leaving the component without an owner. +#! +#! Can only be called by the current owner. Clears both the owner and any nominated owner. +#! +#! Important Note! +#! This feature allows the owner to relinquish administrative privileges, a common pattern +#! after an initial stage with centralized administration is over. 
Once ownership is renounced, +#! the component becomes permanently ownerless and cannot be managed by any account. +#! +#! Inputs: [pad(16)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! - the note sender is not the owner. +#! +#! Invocation: call +pub proc renounce_ownership + exec.assert_sender_is_owner_internal + # => [pad(16)] + + push.RENOUNCED_OWNERSHIP_CONFIG + # => [0, 0, 0, 0, pad(16)] + + exec.save_ownership_info + # => [pad(16)] +end diff --git a/crates/miden-standards/asm/standards/attachments/network_account_target.masm b/crates/miden-standards/asm/standards/attachments/network_account_target.masm index 46133f4136..a5ee0bde40 100644 --- a/crates/miden-standards/asm/standards/attachments/network_account_target.masm +++ b/crates/miden-standards/asm/standards/attachments/network_account_target.masm @@ -43,7 +43,7 @@ end #! Returns the account ID encoded in the attachment. #! #! The attachment is expected to have the following layout: -#! [0, exec_hint_tag, account_id_prefix, account_id_suffix] +#! [account_id_suffix, account_id_prefix, exec_hint_tag, 0] #! #! WARNING: This procedure does not validate the attachment scheme or kind. The caller #! should validate these using `is_network_account_target` before calling this procedure. @@ -52,38 +52,39 @@ end #! The caller should validate the account ID if needed using `account_id::validate`. #! #! Inputs: [NOTE_ATTACHMENT] -#! Outputs: [account_id_prefix, account_id_suffix] +#! Outputs: [account_id_suffix, account_id_prefix] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of an account ID. +#! - account_id_{suffix,prefix} are the suffix and prefix felts of an account ID. #! #! 
Invocation: exec pub proc get_id - # => [NOTE_ATTACHMENT] = [0, exec_hint_tag, account_id_prefix, account_id_suffix] + # => [NOTE_ATTACHMENT] = [account_id_suffix, account_id_prefix, exec_hint_tag, 0] - drop drop - # => [account_id_prefix, account_id_suffix] + movup.2 drop movup.2 drop + # => [account_id_suffix, account_id_prefix] end #! Creates a new attachment of type NetworkAccountTarget with the following layout: -#! [0, exec_hint_tag, account_id_prefix, account_id_suffix] +#! [account_id_suffix, account_id_prefix, exec_hint_tag, 0] #! -#! Inputs: [account_id_prefix, account_id_suffix, exec_hint] +#! Inputs: [account_id_suffix, account_id_prefix, exec_hint_tag] #! Outputs: [attachment_scheme, attachment_kind, NOTE_ATTACHMENT] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of an account ID. -#! - exec_hint is the execution hint for the note. +#! - account_id_{suffix,prefix} are the suffix and prefix felts of an account ID. +#! - exec_hint_tag is the encoded execution hint for the note with its tag. #! - attachment_kind is the attachment kind (Word = 1) for use with `output_note::set_attachment`. #! - attachment_scheme is the attachment scheme (1) for use with `output_note::set_attachment`. #! #! Invocation: exec pub proc new - movup.2 - push.0 + # => [account_id_suffix, account_id_prefix, exec_hint_tag] + push.0 movdn.3 + # => [NOTE_ATTACHMENT] = [account_id_suffix, account_id_prefix, exec_hint_tag, 0] push.NETWORK_ACCOUNT_TARGET_ATTACHMENT_KIND push.NETWORK_ACCOUNT_TARGET_ATTACHMENT_SCHEME - # => [attachment_scheme, attachment_kind, ATTACHMENT] + # => [attachment_scheme, attachment_kind, NOTE_ATTACHMENT] end #! 
Returns a boolean indicating whether the active account matches the target account @@ -115,14 +116,13 @@ pub proc active_account_matches_target_account # ensure the attachment is a network account target exec.is_network_account_target assert.err=ERR_NOT_NETWORK_ACCOUNT_TARGET - - # => [NOTE_ATTACHMENT] = [0, exec_hint_tag, account_id_prefix, account_id_suffix] + # => [NOTE_ATTACHMENT] = [target_id_suffix, target_id_prefix, exec_hint_tag, 0] exec.get_id - # => [account_id_prefix, account_id_suffix] + # => [target_id_suffix, target_id_prefix] exec.active_account::get_id - # => [account_id_prefix, account_id_suffix, target_id_prefix, target_id_suffix] + # => [active_account_id_suffix, active_account_id_prefix, target_id_suffix, target_id_prefix] exec.account_id::is_equal # => [is_equal] diff --git a/crates/miden-standards/asm/standards/auth/mod.masm b/crates/miden-standards/asm/standards/auth/mod.masm index ec213ee4b8..9d6503aad1 100644 --- a/crates/miden-standards/asm/standards/auth/mod.masm +++ b/crates/miden-standards/asm/standards/auth/mod.masm @@ -1,46 +1,11 @@ use miden::protocol::native_account use miden::protocol::tx -use miden::core::crypto::hashes::rpo256 - -#! Inputs: [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] -#! 
Outputs: [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] -@locals(16) -pub proc adv_insert_hqword - loc_storew_be.0 - movdnw.3 - loc_storew_be.4 - movdnw.3 - loc_storew_be.8 - movdnw.3 - loc_storew_be.12 - movdnw.3 - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] - - exec.hash_tx_summary - # => [MESSAGE] - - locaddr.0 - dup add.16 - # => [mem_addr_end, mem_addr_start, MESSAGE] - - movdn.5 movdn.4 - # => [MESSAGE, mem_addr_start, mem_addr_end] - - adv.insert_mem - drop drop - # => [<4 stack elements>] - - loc_loadw_be.12 - padw loc_loadw_be.8 - padw loc_loadw_be.4 - padw loc_loadw_be.0 - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] -end +use miden::core::crypto::hashes::poseidon2 #! Creates the transaction summary and returns it in the order in which it will be hashed. #! #! Inputs: [SALT] -#! Outputs: [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] +#! Outputs: [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] #! #! Where: #! - SALT is a user-defined input recommended to use as replay protection. @@ -48,22 +13,19 @@ end #! - INPUT_NOTES_COMMITMENT is the commitment to the transaction's inputs notes. #! - ACCOUNT_DELTA_COMMITMENT is the commitment to the transaction's account delta. 
pub proc create_tx_summary - exec.native_account::compute_delta_commitment - # => [ACCOUNT_DELTA_COMMITMENT, SALT] + exec.tx::get_output_notes_commitment + # => [OUTPUT_NOTES_COMMITMENT, SALT] exec.tx::get_input_notes_commitment - # => [INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT, SALT] - - exec.tx::get_output_notes_commitment - # => [OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT, SALT] + # => [INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] - movupw.3 - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] + exec.native_account::compute_delta_commitment + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] end #! Hashes the provided transaction summary and returns its commitment. #! -#! Inputs: [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] +#! Inputs: [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] #! Outputs: [TX_SUMMARY_COMMITMENT] #! #! Where: @@ -72,26 +34,21 @@ end #! - INPUT_NOTES_COMMITMENT is the commitment to the transaction's inputs notes. #! - ACCOUNT_DELTA_COMMITMENT is the commitment to the transaction's account delta. 
pub proc hash_tx_summary - swapdw - # => [INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT, SALT, OUTPUT_NOTES_COMMITMENT] - # pad capacity element of the hasher padw movdnw.2 - # => [INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT, CAPACITY, SALT, OUTPUT_NOTES_COMMITMENT] + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, CAPACITY, OUTPUT_NOTES_COMMITMENT, SALT] - exec.rpo256::permute - # => [RATE, RATE, PERM, SALT, OUTPUT_NOTES_COMMITMENT] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY, OUTPUT_NOTES_COMMITMENT, SALT] # drop rate words dropw dropw - # => [PERM, SALT, OUTPUT_NOTES_COMMITMENT] + # => [CAPACITY, OUTPUT_NOTES_COMMITMENT, SALT] movdnw.2 - # => [SALT, OUTPUT_NOTES_COMMITMENT, PERM] - - exec.rpo256::permute - # => [RATE, RATE, PERM] + # => [OUTPUT_NOTES_COMMITMENT, SALT, CAPACITY] - exec.rpo256::squeeze_digest + exec.poseidon2::permute + exec.poseidon2::squeeze_digest # => [TX_SUMMARY_COMMITMENT] end diff --git a/crates/miden-standards/asm/standards/auth/multisig.masm b/crates/miden-standards/asm/standards/auth/multisig.masm new file mode 100644 index 0000000000..ed20ff2325 --- /dev/null +++ b/crates/miden-standards/asm/standards/auth/multisig.masm @@ -0,0 +1,732 @@ +# The MASM code of the Multi-Signature Authentication Component. +# +# See the `AuthMultisig` Rust type's documentation for more details. + +use miden::protocol::active_account +use miden::protocol::auth::AUTH_UNAUTHORIZED_EVENT +use miden::protocol::native_account +use miden::standards::auth +use miden::core::word + +# Local Memory Addresses +const IS_SIGNER_FOUND_LOC=0 +const CURRENT_SIGNER_INDEX_LOC=1 + +const NEW_NUM_OF_APPROVERS_LOC=0 +const INIT_NUM_OF_APPROVERS_LOC=1 + +const DEFAULT_THRESHOLD_LOC=0 + +# CONSTANTS +# ================================================================================================= + +# Storage Slots +# +# This authentication component uses named storage slots. 
+# - THRESHOLD_CONFIG_SLOT: +# [default_threshold, num_approvers, 0, 0] +# +# - APPROVER_PUBLIC_KEYS_SLOT (map): +# APPROVER_MAP_KEY => APPROVER_PUBLIC_KEY +# where APPROVER_MAP_KEY = [key_index, 0, 0, 0] +# +# - APPROVER_SCHEME_ID_SLOT (map): +# APPROVER_MAP_KEY => [scheme_id, 0, 0, 0] +# where APPROVER_MAP_KEY = [key_index, 0, 0, 0] +# +# - EXECUTED_TXS_SLOT (map): +# TRANSACTION_MESSAGE => [is_executed, 0, 0, 0] +# +# - PROC_THRESHOLD_ROOTS_SLOT (map): +# PROC_ROOT => [proc_threshold, 0, 0, 0] + + +# The slot in this component's storage layout where the default signature threshold and +# number of approvers are stored as: +# [default_threshold, num_approvers, 0, 0]. +# The threshold is guaranteed to be less than or equal to num_approvers. +const THRESHOLD_CONFIG_SLOT = word("miden::standards::auth::multisig::threshold_config") + +# The slot in this component's storage layout where the public keys map is stored. +# Map entries: [key_index, 0, 0, 0] => APPROVER_PUBLIC_KEY +const APPROVER_PUBLIC_KEYS_SLOT = word("miden::standards::auth::multisig::approver_public_keys") + +# The slot in this component's storage layout where the scheme id for the corresponding public keys map is stored. +# Map entries: [key_index, 0, 0, 0] => [scheme_id, 0, 0, 0] +const APPROVER_SCHEME_ID_SLOT = word("miden::standards::auth::multisig::approver_schemes") + +# The slot in this component's storage layout where executed transactions are stored. +# Map entries: transaction_message => [is_executed, 0, 0, 0] +const EXECUTED_TXS_SLOT = word("miden::standards::auth::multisig::executed_transactions") + +# The slot in this component's storage layout where procedure thresholds are stored. 
+# Map entries: PROC_ROOT => [proc_threshold, 0, 0, 0] +const PROC_THRESHOLD_ROOTS_SLOT = word("miden::standards::auth::multisig::procedure_thresholds") + +# Executed Transaction Flag Constant +const IS_EXECUTED_FLAG = [1, 0, 0, 0] + +# ERRORS +# ================================================================================================= + +const ERR_TX_ALREADY_EXECUTED = "failed to approve multisig transaction as it was already executed" + +const ERR_MALFORMED_MULTISIG_CONFIG = "number of approvers must be equal to or greater than threshold" + +const ERR_ZERO_IN_MULTISIG_CONFIG = "number of approvers or threshold must not be zero" + +const ERR_APPROVER_COUNTS_NOT_U32 = "initial and new number of approvers must be u32" + +const ERR_SIGNER_INDEX_NOT_U32 = "signer index must be u32" + +const ERR_PROC_THRESHOLD_NOT_U32 = "procedure threshold must be u32" + +const ERR_NUM_APPROVERS_OR_PROC_THRESHOLD_NOT_U32 = "number of approvers and procedure threshold must be u32" + +const ERR_PROC_THRESHOLD_EXCEEDS_NUM_APPROVERS = "procedure threshold exceeds new number of approvers" + +#! Remove old approver public keys and the corresponding scheme ids +#! from the approver public key and scheme id mappings. +#! +#! This procedure cleans up the storage by removing public keys and signature scheme ids of approvers +#! that are no longer part of the multisig configuration. +#! +#! Inputs: [init_num_of_approvers, new_num_of_approvers] +#! Outputs: [] +#! +#! Where: +#! - init_num_of_approvers is the original number of approvers before the update +#! - new_num_of_approvers is the new number of approvers after the update +#! +#! Panics if: +#! - init_num_of_approvers is not a u32 value. +#! - new_num_of_approvers is not a u32 value. 
+proc cleanup_pubkey_and_scheme_id_mapping(init_num_of_approvers: u32, new_num_of_approvers: u32) + dup.1 dup.1 + u32assert2.err=ERR_APPROVER_COUNTS_NOT_U32 + u32lt + # => [should_loop, i = init_num_of_approvers, new_num_of_approvers] + + while.true + # => [i, new_num_of_approvers] + + sub.1 + # => [i-1, new_num_of_approvers] + + # clear scheme id at APPROVER_MAP_KEY(i-1) + dup exec.create_approver_map_key + # => [APPROVER_MAP_KEY, i-1, new_num_of_approvers] + + padw swapw + # => [APPROVER_MAP_KEY, EMPTY_WORD, i-1, new_num_of_approvers] + + push.APPROVER_SCHEME_ID_SLOT[0..2] + # => [scheme_id_slot_suffix, scheme_id_slot_prefix, APPROVER_MAP_KEY, EMPTY_WORD, i-1, new_num_of_approvers] + + exec.native_account::set_map_item + # => [OLD_VALUE, i-1, new_num_of_approvers] + + dropw + # => [i-1, new_num_of_approvers] + + # clear public key at APPROVER_MAP_KEY(i-1) + dup exec.create_approver_map_key + # => [APPROVER_MAP_KEY, i-1, new_num_of_approvers] + + padw swapw + # => [APPROVER_MAP_KEY, EMPTY_WORD, i-1, new_num_of_approvers] + + push.APPROVER_PUBLIC_KEYS_SLOT[0..2] + # => [pub_key_slot_suffix, pub_key_slot_prefix, APPROVER_MAP_KEY, EMPTY_WORD, i-1, new_num_of_approvers] + + exec.native_account::set_map_item + # => [OLD_VALUE, i-1, new_num_of_approvers] + + dropw + # => [i-1, new_num_of_approvers] + + dup.1 dup.1 + u32lt + # => [should_loop, i-1, new_num_of_approvers] + end + + drop drop +end + +#! Builds the storage map key for a signer index. +#! +#! Inputs: [key_index] +#! Outputs: [APPROVER_MAP_KEY] +proc create_approver_map_key + push.0.0.0 movup.3 + # => [[key_index, 0, 0, 0]] + # => [APPROVER_MAP_KEY] +end + +#! Asserts that all configured per-procedure threshold overrides are less than or equal to +#! number of approvers +#! +#! Inputs: [num_approvers] +#! Outputs: [] +#! Panics if: +#! - any configured procedure threshold is not a u32 value. +#! - any configured procedure threshold exceeds num_approvers. 
+proc assert_proc_thresholds_lte_num_approvers(num_approvers: u32) + exec.active_account::get_num_procedures + # => [num_procedures, num_approvers] + + dup neq.0 + # => [should_continue, num_procedures, num_approvers] + while.true + sub.1 dup + # => [proc_index, proc_index, num_approvers] + + exec.active_account::get_procedure_root + # => [PROC_ROOT, proc_index, num_approvers] + + push.PROC_THRESHOLD_ROOTS_SLOT[0..2] + # => [proc_roots_slot_suffix, proc_roots_slot_prefix, PROC_ROOT, proc_index, num_approvers] + + exec.active_account::get_map_item + # => [[proc_threshold, 0, 0, 0], proc_index, num_approvers] + + movdn.3 drop drop drop + # => [proc_threshold, proc_index, num_approvers] + + dup.2 + # => [num_approvers, proc_threshold, proc_index, num_approvers] + + u32assert2.err=ERR_PROC_THRESHOLD_NOT_U32 + u32gt assertz.err=ERR_PROC_THRESHOLD_EXCEEDS_NUM_APPROVERS + # => [proc_index, num_approvers] + + dup neq.0 + # => [should_continue, proc_index, num_approvers] + end + # => [proc_index, num_approvers] + + drop drop + # => [] +end + +#! Update threshold config, add & remove approvers, and update the approver scheme ids +#! +#! Inputs: +#! Operand stack: [MULTISIG_CONFIG_HASH, pad(12)] +#! Advice map: { +#! MULTISIG_CONFIG_HASH => +#! [ +#! CONFIG, +#! PUB_KEY_N, SCHEME_ID_N, PUB_KEY_N-1, SCHEME_ID_N-1, +#! ..., PUB_KEY_0, SCHEME_ID_0 +#! ] +#! } +#! Outputs: +#! Operand stack: [] +#! +#! Where: +#! - MULTISIG_CONFIG_HASH is the hash of the threshold, +#! new public key vector, and the corresponding scheme identifiers +#! - MULTISIG_CONFIG is [threshold, num_approvers, 0, 0] +#! - PUB_KEY_i is the public key of the i-th signer, interleaved with its SCHEME_ID_i +#! in the order consumed by the update loop (one adv_loadw pair per signer) +#! - SCHEME_ID_i is the signature scheme id of the i-th signer +#! +#! Locals: +#! 0: new_num_of_approvers +#! 1: init_num_of_approvers +#! +#!
Invocation: call +@locals(2) +pub proc update_signers_and_threshold(multisig_config_hash: word) + adv.push_mapval + # => [MULTISIG_CONFIG_HASH, pad(12)] + + adv_loadw + # => [MULTISIG_CONFIG, pad(12)] + + # store new_num_of_approvers for later + dup.1 loc_store.NEW_NUM_OF_APPROVERS_LOC + # => [MULTISIG_CONFIG, pad(12)] + + dup dup.2 + # => [num_approvers, threshold, MULTISIG_CONFIG, pad(12)] + + # make sure that the threshold is not greater than the number of approvers + u32assert2.err=ERR_MALFORMED_MULTISIG_CONFIG + u32gt assertz.err=ERR_MALFORMED_MULTISIG_CONFIG + # => [MULTISIG_CONFIG, pad(12)] + + dup dup.2 + # => [num_approvers, threshold, MULTISIG_CONFIG, pad(12)] + + # make sure that neither num_approvers nor threshold is zero + eq.0 assertz.err=ERR_ZERO_IN_MULTISIG_CONFIG + eq.0 assertz.err=ERR_ZERO_IN_MULTISIG_CONFIG + # => [MULTISIG_CONFIG, pad(12)] + + loc_load.NEW_NUM_OF_APPROVERS_LOC + # => [num_approvers, MULTISIG_CONFIG, pad(12)] + + # make sure that all existing procedure threshold overrides remain reachable + exec.assert_proc_thresholds_lte_num_approvers + # => [MULTISIG_CONFIG, pad(12)] + + push.THRESHOLD_CONFIG_SLOT[0..2] + # => [config_slot_suffix, config_slot_prefix, MULTISIG_CONFIG, pad(12)] + + exec.native_account::set_item + # => [OLD_THRESHOLD_CONFIG, pad(12)] + + # store init_num_of_approvers for later + drop loc_store.INIT_NUM_OF_APPROVERS_LOC drop drop + # => [pad(12)] + + loc_load.NEW_NUM_OF_APPROVERS_LOC + # => [num_approvers, pad(12)] + + dup neq.0 + while.true + sub.1 + # => [i-1, pad(12)] + + dup exec.create_approver_map_key + # => [APPROVER_MAP_KEY, i-1, pad(12)] + + padw adv_loadw + # => [PUB_KEY, APPROVER_MAP_KEY, i-1, pad(12)] + + swapw + # => [APPROVER_MAP_KEY, PUB_KEY, i-1, pad(12)] + + push.APPROVER_PUBLIC_KEYS_SLOT[0..2] + # => [pub_key_slot_suffix, pub_key_slot_prefix, APPROVER_MAP_KEY, PUB_KEY, i-1, pad(12)] + + exec.native_account::set_map_item + # => [OLD_VALUE, i-1, pad(12)] + + # override OLD_VALUE with SCHEME_ID_WORD + adv_loadw + #
=> [SCHEME_ID_WORD, i-1, pad(12)] + + # validate the scheme id word is in a correct form + exec.auth::signature::assert_supported_scheme_word + # => [SCHEME_ID_WORD, i-1, pad(12)] + + dup.4 exec.create_approver_map_key + # => [APPROVER_MAP_KEY, SCHEME_ID_WORD, i-1, pad(12)] + + push.APPROVER_SCHEME_ID_SLOT[0..2] + # => [scheme_id_slot_id_suffix, scheme_id_slot_id_prefix, APPROVER_MAP_KEY, SCHEME_ID_WORD, i-1, pad(12)] + + exec.native_account::set_map_item + # => [OLD_VALUE, i-1, pad(12)] + + dropw + # => [i-1, pad(12)] + + dup neq.0 + # => [is_non_zero, i-1, pad(12)] + end + # => [pad(13)] + + drop + # => [pad(12)] + + # compare initial vs current multisig config + + # load init_num_of_approvers & new_num_of_approvers + loc_load.NEW_NUM_OF_APPROVERS_LOC loc_load.INIT_NUM_OF_APPROVERS_LOC + # => [init_num_of_approvers, new_num_of_approvers, pad(12)] + + exec.cleanup_pubkey_and_scheme_id_mapping + # => [pad(12)] +end + +# Computes the effective transaction threshold based on called procedures and per-procedure +# overrides stored in PROC_THRESHOLD_ROOTS_SLOT. Falls back to default_threshold if no +# overrides apply. +# +#! Inputs: [default_threshold] +#! Outputs: [transaction_threshold] +@locals(1) +proc compute_transaction_threshold(default_threshold: u32) -> u32 + # 1. initialize transaction_threshold = 0 + # 2. iterate through all account procedures + # a. check if the procedure was called during the transaction + # b. if called, get the override threshold of that procedure from the config map + # c. if proc_threshold > transaction_threshold, set transaction_threshold = proc_threshold + # 3. if transaction_threshold == 0 at the end, revert to using default_threshold + + # store default_threshold for later + loc_store.DEFAULT_THRESHOLD_LOC + # => [] + + # 1. 
initialize transaction_threshold = 0 + push.0 + # => [transaction_threshold] + + # get the number of account procedures + exec.active_account::get_num_procedures + # => [num_procedures, transaction_threshold] + + # 2. iterate through all account procedures + dup neq.0 + # => [should_continue, num_procedures, transaction_threshold] + while.true + sub.1 dup + # => [num_procedures-1, num_procedures-1, transaction_threshold] + + # get procedure root of the procedure with index i + exec.active_account::get_procedure_root dupw + # => [PROC_ROOT, PROC_ROOT, num_procedures-1, transaction_threshold] + + # 2a. check if this procedure has been called in the transaction + exec.native_account::was_procedure_called + # => [was_called, PROC_ROOT, num_procedures-1, transaction_threshold] + + # if it has been called, get the override threshold of that procedure + if.true + # => [PROC_ROOT, num_procedures-1, transaction_threshold] + + push.PROC_THRESHOLD_ROOTS_SLOT[0..2] + # => [proc_roots_slot_suffix, proc_roots_slot_prefix, PROC_ROOT, num_procedures-1, transaction_threshold] + + # 2b. get the override proc_threshold of that procedure + # if the procedure has no override threshold, the returned map item will be [0, 0, 0, 0] + exec.active_account::get_initial_map_item + # => [[proc_threshold, 0, 0, 0], num_procedures-1, transaction_threshold] + + movdn.3 drop drop drop dup dup.3 + # => [transaction_threshold, proc_threshold, proc_threshold, num_procedures-1, transaction_threshold] + + u32assert2.err="transaction threshold or procedure threshold are not u32" + u32gt + # => [is_gt, proc_threshold, num_procedures-1, transaction_threshold] + # 2c. 
if proc_threshold > transaction_threshold, update transaction_threshold + movup.2 movdn.3 + # => [is_gt, proc_threshold, transaction_threshold, num_procedures-1] + cdrop + # => [updated_transaction_threshold, num_procedures-1] + swap + # => [num_procedures-1, updated_transaction_threshold] + # if it has not been called during this transaction, nothing to do, move to the next procedure + else + dropw + # => [num_procedures-1, transaction_threshold] + end + + dup neq.0 + # => [should_continue, num_procedures-1, transaction_threshold] + end + + drop + # => [transaction_threshold] + + loc_load.DEFAULT_THRESHOLD_LOC + # => [default_threshold, transaction_threshold] + + # 3. if transaction_threshold == 0 at the end, revert to using default_threshold + dup.1 eq.0 + # => [is_zero, default_threshold, transaction_threshold] + + cdrop + # => [effective_transaction_threshold] +end + +#! Returns current num_approvers and the threshold `THRESHOLD_CONFIG_SLOT` +#! +#! Inputs: [] +#! Outputs: [threshold, num_approvers] +#! +#! Invocation: call +pub proc get_threshold_and_num_approvers + push.THRESHOLD_CONFIG_SLOT[0..2] + exec.active_account::get_initial_item + # => [threshold, num_approvers, 0, 0] + + movup.2 drop movup.2 drop + # => [threshold, num_approvers] +end + +#! Sets or clears a per-procedure threshold override. +#! +#! Inputs: [proc_threshold, PROC_ROOT] +#! Outputs: [] +#! +#! Where: +#! - PROC_ROOT is the root of the account procedure whose override is being updated. +#! - proc_threshold is the override threshold to set. +#! - if proc_threshold == 0, override is cleared and the default threshold applies. +#! - if proc_threshold > 0, it must be <= current num_approvers. +#! +#! Panics if: +#! - proc_threshold is not a u32 value. +#! - current num_approvers is not a u32 value. +#! - proc_threshold > current num_approvers. +#! +#! 
Invocation: call +pub proc set_procedure_threshold + exec.get_threshold_and_num_approvers + # => [default_threshold, num_approvers, proc_threshold, PROC_ROOT] + + drop + # => [num_approvers, proc_threshold, PROC_ROOT] + + dup.1 swap + # => [num_approvers, proc_threshold, proc_threshold, PROC_ROOT] + + u32assert2.err=ERR_NUM_APPROVERS_OR_PROC_THRESHOLD_NOT_U32 + u32gt assertz.err=ERR_PROC_THRESHOLD_EXCEEDS_NUM_APPROVERS + # => [proc_threshold, PROC_ROOT] + + # Store [proc_threshold, 0, 0, 0] = PROC_THRESHOLD_WORD, where proc_threshold == 0 acts as clear. + push.0.0.0 + movup.3 + swapw + # => [PROC_ROOT, PROC_THRESHOLD_WORD] + + push.PROC_THRESHOLD_ROOTS_SLOT[0..2] + # => [proc_roots_slot_suffix, proc_roots_slot_prefix, PROC_ROOT, PROC_THRESHOLD_WORD] + + exec.native_account::set_map_item + # => [OLD_PROC_THRESHOLD_WORD] + + dropw + # => [] +end + +#! Returns signer public key at index i +#! +#! Inputs: [index] +#! Outputs: [PUB_KEY, scheme_id] +#! +#! Panics if: +#! - index is not a u32 value. +#! +#! Invocation: call +pub proc get_signer_at + u32assert.err=ERR_SIGNER_INDEX_NOT_U32 + # => [index] + + dup + # => [index, index] + + exec.create_approver_map_key + # => [APPROVER_MAP_KEY, index] + + push.APPROVER_PUBLIC_KEYS_SLOT[0..2] + # => [APPROVER_PUBLIC_KEYS_SLOT, APPROVER_MAP_KEY, index] + + exec.active_account::get_initial_map_item + # => [PUB_KEY, index] + + movup.4 + # => [index, PUB_KEY] + + exec.create_approver_map_key + # => [APPROVER_MAP_KEY, PUB_KEY] + + push.APPROVER_SCHEME_ID_SLOT[0..2] + # => [APPROVER_SCHEME_ID_SLOT, APPROVER_MAP_KEY, PUB_KEY] + + exec.active_account::get_initial_map_item + # => [SCHEME_ID_WORD, PUB_KEY] + + movdn.3 drop drop drop + # => [scheme_id, PUB_KEY] + + movdn.4 + # => [PUB_KEY, scheme_id] +end + + +#! Returns 1 if PUB_KEY is a current signer, else 0. +#! +#! Inputs: [PUB_KEY] +#! Outputs: [is_signer] +#! Locals: +#! 0: is_signer_found +#! 1: current_signer_index +#! +#! 
Invocation: call +@locals(2) +pub proc is_signer(pub_key: word) -> felt + # initialize is_signer_found = false + push.0 loc_store.IS_SIGNER_FOUND_LOC + # => [PUB_KEY] + + exec.get_threshold_and_num_approvers + # => [threshold, num_approvers, PUB_KEY] + + drop + # => [num_approvers, PUB_KEY] + + dup neq.0 + # => [has_remaining_signers, num_approvers, PUB_KEY] + + while.true + # => [i, PUB_KEY] + + sub.1 + # => [i-1, PUB_KEY] + + # store i-1 for this loop iteration before map lookup + dup loc_store.CURRENT_SIGNER_INDEX_LOC + # => [i-1, PUB_KEY] + + exec.create_approver_map_key + # => [APPROVER_MAP_KEY, PUB_KEY] + + push.APPROVER_PUBLIC_KEYS_SLOT[0..2] + # => [pub_key_slot_suffix, pub_key_slot_prefix, APPROVER_MAP_KEY, PUB_KEY] + + exec.active_account::get_initial_map_item + # => [CURRENT_PUB_KEY, PUB_KEY] + + dupw.1 exec.word::eq + # => [is_pub_key_match, PUB_KEY] + + loc_store.IS_SIGNER_FOUND_LOC + # => [PUB_KEY] + + loc_load.CURRENT_SIGNER_INDEX_LOC + # => [i-1, PUB_KEY] + + dup neq.0 + # => [has_remaining_signers, i-1, PUB_KEY] + + loc_load.IS_SIGNER_FOUND_LOC not + # => [!is_signer_found, has_remaining_signers, i-1, PUB_KEY] + + and + # => [should_loop, i-1, PUB_KEY] + end + + drop dropw + # => [] + + loc_load.IS_SIGNER_FOUND_LOC + # => [is_signer] +end + +#! Check if transaction has already been executed and add it to executed transactions for replay protection, and +#! finalizes multisig authentication. +#! +#! Inputs: [MSG] +#! Outputs: [] +#! +#! Panics if: +#! - the same transaction has already been executed +#! +#! 
Invocation: exec +pub proc assert_new_tx(msg: word) + push.IS_EXECUTED_FLAG + # => [[0, 0, 0, is_executed], MSG] + + swapw + # => [TX_SUMMARY_COMMITMENT, IS_EXECUTED_FLAG] + + push.EXECUTED_TXS_SLOT[0..2] + # => [txs_slot_suffix, txs_slot_prefix, MSG, IS_EXECUTED_FLAG] + + # Set the key value pair in the map to mark transaction as executed + exec.native_account::set_map_item + # => [[0, 0, 0, is_executed]] + + movdn.3 drop drop drop + # => [is_executed] + + assertz.err=ERR_TX_ALREADY_EXECUTED + # => [] +end + +#! Authenticate a transaction using the signature scheme specified by scheme_id +#! with multi-signature support +#! +#! Supported schemes: +#! - 1 => ecdsa_k256_keccak +#! - 2 => falcon512_poseidon2 +#! +#! This procedure implements multi-signature authentication by: +#! 1. Computing the transaction summary message that needs to be signed +#! 2. Verifying signatures from multiple required signers against their public keys +#! 3. Ensuring the minimum threshold of valid signatures is met +#! +#! Inputs: +#! Operand stack: [SALT] +#! Advice map: { +#! h(SIG_0, MSG): SIG_0, +#! h(SIG_1, MSG): SIG_1, +#! h(SIG_n, MSG): SIG_n +#! } +#! Outputs: +#! Operand stack: [TX_SUMMARY_COMMITMENT] +#! +#! Where: +#! - SALT is a cryptographically random nonce that enables multiple concurrent +#! multisig transactions while maintaining replay protection. Each transaction +#! must use a unique SALT value to ensure transaction uniqueness. +#! - SIG_i is the signature from the i-th signer. +#! - MSG is the transaction message being signed. +#! - h(SIG_i, MSG) is the hash of the signature and message used as the advice map key. +#! +#! Panics if: +#! - insufficient number of valid signatures (below threshold). +#! +#! 
Invocation: call +@locals(1) +pub proc auth_tx(salt: word) + exec.native_account::incr_nonce drop + # => [SALT] + + # ------ Computing transaction summary ------ + + exec.auth::create_tx_summary + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] + + # insert tx summary into advice provider for extraction by the host + adv.insert_hqword + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] + + # the commitment to the tx summary is the message that is signed + exec.auth::hash_tx_summary + # => [TX_SUMMARY_COMMITMENT] + + # ------ Verifying approver signatures ------ + + exec.get_threshold_and_num_approvers + # => [default_threshold, num_of_approvers, TX_SUMMARY_COMMITMENT] + + movdn.5 + # => [num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] + + push.APPROVER_PUBLIC_KEYS_SLOT[0..2] + # => [pub_key_slot_suffix, pub_key_slot_prefix, num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] + + push.APPROVER_SCHEME_ID_SLOT[0..2] + # => [scheme_id_slot_suffix, scheme_id_slot_prefix, pub_key_slot_suffix, pub_key_slot_prefix, num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] + + exec.::miden::standards::auth::signature::verify_signatures + # => [num_verified_signatures, TX_SUMMARY_COMMITMENT, default_threshold] + + # ------ Checking num_verified_signatures >= transaction_threshold ------ + + movup.5 + # => [default_threshold, num_verified_signatures, TX_SUMMARY_COMMITMENT] + + exec.compute_transaction_threshold + # => [transaction_threshold, num_verified_signatures, TX_SUMMARY_COMMITMENT] + + u32assert2 u32lt + # => [is_unauthorized, TX_SUMMARY_COMMITMENT] + + # If signatures are non-existent the tx will fail here. + if.true + emit.AUTH_UNAUTHORIZED_EVENT + push.0 assert.err="insufficient number of signatures" + end + + # TX_SUMMARY_COMMITMENT is returned so wrappers can run optional checks + # (e.g. PSM) before replay-protection finalization.
+ # => [TX_SUMMARY_COMMITMENT] +end diff --git a/crates/miden-standards/asm/standards/auth/psm.masm b/crates/miden-standards/asm/standards/auth/psm.masm new file mode 100644 index 0000000000..d778cafb14 --- /dev/null +++ b/crates/miden-standards/asm/standards/auth/psm.masm @@ -0,0 +1,158 @@ +# Private State Manager (PSM) account component. +# This component is composed into account auth flows especially for multisig and adds +# an extra signature check by a dedicated private state manager signer. +# +# Private State Manager (PSM) is a cloud backup and synchronization layer for Miden private accounts +# See: https://github.com/OpenZeppelin/private-state-manager + +use miden::protocol::auth::AUTH_UNAUTHORIZED_EVENT +use miden::protocol::native_account +use miden::standards::auth::tx_policy +use miden::standards::auth::signature + +# IMPORTANT SECURITY NOTES +# -------------------------------------------------------------------------------- +# - By default, exactly one valid PSM signature is required. +# - If `update_psm_public_key` is the only non-auth account procedure called in the current +# transaction, `verify_signature` skips the PSM signature check so key rotation can proceed +# without the old PSM signer. +# - `update_psm_public_key` rotates the PSM public key and corresponding scheme id using the fixed +# map key `PSM_MAP_KEY`. + + +# CONSTANTS +# ================================================================================================= + +# Storage Slots +# +# This authentication component uses named storage slots. +# - PSM_PUBLIC_KEYS_SLOT (map): +# PSM_MAP_KEY => PSM_PUBLIC_KEY +# where: PSM_MAP_KEY = [0, 0, 0, 0] +# +# - PSM_SCHEME_ID_SLOT (map): +# PSM_MAP_KEY => [scheme_id, 0, 0, 0] +# where: PSM_MAP_KEY = [0, 0, 0, 0] + +# The slot in this component's storage layout where the PSM public key map is stored. 
+# Map entries: [PSM_MAP_KEY] => [PSM_PUBLIC_KEY] +const PSM_PUBLIC_KEYS_SLOT = word("miden::standards::auth::psm::pub_key") + +# The slot in this component's storage layout where the scheme id for the corresponding PSM public key map is stored. +# Map entries: [PSM_MAP_KEY] => [scheme_id, 0, 0, 0] +const PSM_SCHEME_ID_SLOT = word("miden::standards::auth::psm::scheme") + +# Single-entry storage map key where private state manager signer data is stored. +const PSM_MAP_KEY = [0, 0, 0, 0] + +# ERRORS +# ------------------------------------------------------------------------------------------------- +const ERR_INVALID_PSM_SIGNATURE = "invalid private state manager signature" + +# PUBLIC INTERFACE +# ================================================================================================ + +#! Updates the private state manager public key. +#! +#! Inputs: [new_psm_scheme_id, NEW_PSM_PUBLIC_KEY] +#! Outputs: [] +#! +#! Notes: +#! - This procedure only updates the PSM public key and corresponding scheme id. +#! - `verify_signature` skips PSM verification only when this is the only non-auth account +#! procedure called in the transaction. +#! +#! Invocation: call +@locals(1) +pub proc update_psm_public_key(new_psm_scheme_id: felt, new_psm_public_key: word) + # Validate supported signature scheme before committing it to storage. + dup exec.signature::assert_supported_scheme + # => [new_psm_scheme_id, NEW_PSM_PUBLIC_KEY] + + loc_store.0 + # => [NEW_PSM_PUBLIC_KEY] + + push.PSM_MAP_KEY + # => [PSM_MAP_KEY, NEW_PSM_PUBLIC_KEY] + + push.PSM_PUBLIC_KEYS_SLOT[0..2] + # => [psm_pubkeys_slot_prefix, psm_pubkeys_slot_suffix, PSM_MAP_KEY, NEW_PSM_PUBLIC_KEY] + + exec.native_account::set_map_item + # => [OLD_PSM_PUBLIC_KEY] + + dropw + # => [] + + # Store new scheme id as [scheme_id, 0, 0, 0] in the single-entry map. 
+ loc_load.0 + # => [scheme_id] + + push.0.0.0 movup.3 + # => [NEW_PSM_SCHEME_ID_WORD] + + push.PSM_MAP_KEY + # => [PSM_MAP_KEY, NEW_PSM_SCHEME_ID_WORD] + + push.PSM_SCHEME_ID_SLOT[0..2] + # => [psm_scheme_slot_prefix, psm_scheme_slot_suffix, PSM_MAP_KEY, NEW_PSM_SCHEME_ID_WORD] + + exec.native_account::set_map_item + # => [OLD_PSM_SCHEME_ID_WORD] + + dropw + # => [] +end + +#! Conditionally verifies a private state manager signature. +#! +#! Inputs: [MSG] +#! Outputs: [] +#! +#! Panics if: +#! - `update_psm_public_key` is called together with another non-auth account procedure. +#! - `update_psm_public_key` was not called and a valid PSM signature is missing or invalid. +#! +#! Invocation: exec +pub proc verify_signature(msg: word) + procref.update_psm_public_key + # => [UPDATE_PSM_PUBLIC_KEY_ROOT, MSG] + + exec.native_account::was_procedure_called + # => [was_update_psm_public_key_called, MSG] + + if.true + exec.tx_policy::assert_only_one_non_auth_procedure_called + # => [MSG] + + exec.tx_policy::assert_no_input_or_output_notes + # => [MSG] + + dropw + # => [] + else + push.1 + # => [1, MSG] + + push.PSM_PUBLIC_KEYS_SLOT[0..2] + # => [psm_pubkeys_slot_prefix, psm_pubkeys_slot_suffix, 1, MSG] + + push.PSM_SCHEME_ID_SLOT[0..2] + # => [psm_scheme_slot_prefix, psm_scheme_slot_suffix, psm_pubkeys_slot_prefix, psm_pubkeys_slot_suffix, 1, MSG] + + exec.signature::verify_signatures + # => [num_verified_signatures, MSG] + + neq.1 + # => [is_not_exactly_one, MSG] + + if.true + emit.AUTH_UNAUTHORIZED_EVENT + push.0 assert.err=ERR_INVALID_PSM_SIGNATURE + end + # => [MSG] + + dropw + # => [] + end +end diff --git a/crates/miden-standards/asm/standards/auth/signature.masm b/crates/miden-standards/asm/standards/auth/signature.masm index ce013884f8..49cec90b17 100644 --- a/crates/miden-standards/asm/standards/auth/signature.masm +++ b/crates/miden-standards/asm/standards/auth/signature.masm @@ -1,6 +1,6 @@ -use miden::core::crypto::dsa::falcon512rpo +use 
miden::core::crypto::dsa::falcon512_poseidon2 +use miden::core::crypto::hashes::poseidon2 use miden::core::crypto::dsa::ecdsa_k256_keccak -use miden::core::crypto::hashes::rpo256 use miden::protocol::active_account use miden::protocol::auth::AUTH_REQUEST_EVENT use miden::protocol::native_account @@ -12,28 +12,28 @@ use miden::standards::auth # Auth Scheme ID Structure const ECDSA_K256_KECCAK_SCHEME_ID=1 -const FALCON_512_RPO_SCHEME_ID=2 +const FALCON_512_POSEIDON2_SCHEME_ID=2 # Local Memory Addresses for multisig operations const NUM_OF_APPROVERS_LOC=0 const SIGNER_INDEX_LOC=1 -const PUB_KEY_SLOT_SUFFIX_LOC=4 -const PUB_KEY_SLOT_PREFIX_LOC=5 +const APPROVER_PUB_KEY_SLOT_ID_SUFFIX_LOC=4 +const APPROVER_PUB_KEY_SLOT_ID_PREFIX_LOC=5 const CURRENT_PK_LOC=8 const SUCCESSFUL_VERIFICATIONS_LOC=12 -const SCHEME_ID_SUFFIX_LOC=16 -const SCHEME_ID_PREFIX_LOC=17 +const APPROVER_SCHEME_ID_SLOT_ID_SUFFIX_LOC=16 +const APPROVER_SCHEME_ID_SLOT_ID_PREFIX_LOC=17 # ERRORS # ================================================================================================= -const ERR_INVALID_SCHEME_ID = "invalid signature scheme id: expected 2 for falcon512_rpo, 1 for ecdsa_k256_keccak" +const ERR_INVALID_SCHEME_ID = "invalid signature scheme id: expected 2 for falcon512_poseidon2, 1 for ecdsa_k256_keccak" const ERR_INVALID_SCHEME_ID_WORD = "invalid scheme ID word format expected three zero values followed by the scheme ID" #! Authenticate a transaction using the signature scheme specified by scheme_id. #! #! Supported schemes: #! - 1 => ecdsa_k256_keccak -#! - 2 => falcon512_rpo +#! - 2 => falcon512_poseidon2 #! #! It first increments the nonce of the account, independent of whether the account's state has #! changed or not. Then it computes and signs the following message (in memory order): @@ -52,16 +52,19 @@ pub proc authenticate_transaction # Increment the account's nonce. 
# --------------------------------------------------------------------------------------------- # This has to happen before computing the delta commitment, otherwise that procedure will abort - push.0.0 exec.tx::get_block_number exec.native_account::incr_nonce - # => [[final_nonce, ref_block_num, 0, 0], PUB_KEY, scheme_id] + exec.tx::get_block_number + push.0.0 + # => [[0, 0, ref_block_num, final_nonce], PUB_KEY, scheme_id] # Compute the message that is signed. # --------------------------------------------------------------------------------------------- exec.auth::create_tx_summary - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT, PUB_KEY, scheme_id] + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT, PUB_KEY, scheme_id] - exec.auth::adv_insert_hqword + # insert tx summary into advice provider for extraction by the host + adv.insert_hqword + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT, PUB_KEY, scheme_id] # The commitment to the tx summary is the message that is signed exec.auth::hash_tx_summary @@ -94,7 +97,7 @@ end # Verify signature using scheme_id: # 1 => ECDSA (ecdsa_k256_keccak) -# 2 => Falcon (falcon512_rpo) +# 2 => Falcon (falcon512_poseidon2) # # Inputs: [scheme_id, PUB_KEY, MSG] # Outputs: [] @@ -110,14 +113,14 @@ proc verify_signature_by_scheme # OS => [] # AS => [] else - dup eq.FALCON_512_RPO_SCHEME_ID + dup eq.FALCON_512_POSEIDON2_SCHEME_ID # => [is_2, scheme_id, PUB_KEY, MESSAGE] if.true drop # OS => [PUB_KEY, MESSAGE] - exec.falcon512rpo::verify + exec.falcon512_poseidon2::verify # OS => [] # AS => [] else @@ -134,7 +137,7 @@ pub proc is_supported_scheme dup eq.ECDSA_K256_KECCAK_SCHEME_ID # => [is_1, scheme_id] - swap eq.FALCON_512_RPO_SCHEME_ID + swap eq.FALCON_512_POSEIDON2_SCHEME_ID # => [is_2, is_1] or @@ -148,23 +151,21 @@ pub proc assert_supported_scheme exec.is_supported_scheme # => [is_supported] - eq.0 
assertz.err=ERR_INVALID_SCHEME_ID + assert.err=ERR_INVALID_SCHEME_ID # => [] end -#! Validates scheme id word shape: [0, 0, 0, scheme_id] +#! Validates scheme id word shape: [scheme_id, 0, 0, 0] #! Inputs: [SCHEME_ID_WORD] #! Outputs: [SCHEME_ID_WORD] pub proc assert_supported_scheme_word - dup.0 neq.0 assertz.err=ERR_INVALID_SCHEME_ID_WORD - dup.1 neq.0 assertz.err=ERR_INVALID_SCHEME_ID_WORD - dup.2 neq.0 assertz.err=ERR_INVALID_SCHEME_ID_WORD + dupw exec.assert_supported_scheme + # => [0, 0, 0, SCHEME_ID_WORD] - dup.3 - # => [scheme_id, SCHEME_ID_WORD] - - exec.assert_supported_scheme + neq.0 assertz.err=ERR_INVALID_SCHEME_ID_WORD + neq.0 assertz.err=ERR_INVALID_SCHEME_ID_WORD + neq.0 assertz.err=ERR_INVALID_SCHEME_ID_WORD # => [SCHEME_ID_WORD] end @@ -180,16 +181,18 @@ end #! the owner public key mapping - the previous signers must authorize the change to the new signers, #! not the new signers authorizing themselves. #! -#! Inputs: [pub_key_slot_prefix, pub_key_slot_suffix, num_of_approvers, MSG] +#! Inputs: [approver_scheme_id_slot_id_suffix, approver_scheme_id_slot_id_prefix, +#! approver_pub_key_slot_id_suffix, approver_pub_key_slot_id_prefix, +#! num_of_approvers, MSG] #! Outputs: [num_verified_signatures, MSG] @locals(18) pub proc verify_signatures - loc_store.SCHEME_ID_PREFIX_LOC - loc_store.SCHEME_ID_SUFFIX_LOC - # => [pub_key_slot_prefix, pub_key_slot_suffix, num_of_approvers, MSG] + loc_store.APPROVER_SCHEME_ID_SLOT_ID_SUFFIX_LOC + loc_store.APPROVER_SCHEME_ID_SLOT_ID_PREFIX_LOC + # => [approver_pub_key_slot_id_suffix, approver_pub_key_slot_id_prefix, num_of_approvers, MSG] - loc_store.PUB_KEY_SLOT_PREFIX_LOC - loc_store.PUB_KEY_SLOT_SUFFIX_LOC + loc_store.APPROVER_PUB_KEY_SLOT_ID_SUFFIX_LOC + loc_store.APPROVER_PUB_KEY_SLOT_ID_PREFIX_LOC # => [num_of_approvers, MSG] # Initializing SUCCESSFUL_VERIFICATIONS local memory address to 0 @@ -209,24 +212,31 @@ pub proc verify_signatures # Fetch public key from storage map. 
# ----------------------------------------------------------------------------------------- - sub.1 dup loc_store.SIGNER_INDEX_LOC dup push.0.0.0 - loc_load.PUB_KEY_SLOT_SUFFIX_LOC loc_load.PUB_KEY_SLOT_PREFIX_LOC - # => [owner_key_slot_prefix, owner_key_slot_suffix, [0, 0, 0, i-1], i-1, MSG] + sub.1 dup dup loc_store.SIGNER_INDEX_LOC + # => [i-1, i-1, MSG] + + exec.create_approver_map_key + # => [APPROVER_MAP_KEY, i-1, MSG] # Get public key from initial storage state + loc_load.APPROVER_PUB_KEY_SLOT_ID_PREFIX_LOC + loc_load.APPROVER_PUB_KEY_SLOT_ID_SUFFIX_LOC exec.active_account::get_initial_map_item # => [OWNER_PUB_KEY, i-1, MSG] - loc_storew_be.CURRENT_PK_LOC + loc_storew_le.CURRENT_PK_LOC # => [OWNER_PUB_KEY, i-1, MSG] # Check if signature exists for this signer. # ----------------------------------------------------------------------------------------- - movup.4 movdn.8 swapw dupw movdnw.2 - # => [MSG, OWNER_PUB_KEY, MSG, i-1] + movup.4 movdn.8 + # => [OWNER_PUB_KEY, MSG, i-1] + + dupw.1 swapw + # => [OWNER_PUB_KEY, MSG, MSG, i-1] - exec.rpo256::merge + exec.poseidon2::merge # => [SIG_KEY, MSG, i-1] adv.has_mapkey @@ -248,7 +258,7 @@ pub proc verify_signatures # Verify the signature against the public key and message. # ----------------------------------------------------------------------------------------- - loc_loadw_be.CURRENT_PK_LOC + loc_loadw_le.CURRENT_PK_LOC # => [PK, MSG, MSG, i-1] swapw @@ -260,18 +270,18 @@ pub proc verify_signatures swapw # => [PUB_KEY, MSG, MSG, i-1] - # Build map key [0, 0, 0, i-1] from the current signer index. - loc_load.SIGNER_INDEX_LOC push.0.0.0 - # => [[0, 0, 0, i-1], PUB_KEY, MSG, MSG, i-1] + # Build map key from the current signer index. 
+ loc_load.SIGNER_INDEX_LOC exec.create_approver_map_key + # => [APPROVER_MAP_KEY, PUB_KEY, MSG, MSG, i-1] - loc_load.SCHEME_ID_SUFFIX_LOC loc_load.SCHEME_ID_PREFIX_LOC - # => [scheme_id_prefix_loc, scheme_id_suffix_loc, [0, 0, 0, i-1], PUB_KEY, MSG, MSG, i-1] + loc_load.APPROVER_SCHEME_ID_SLOT_ID_PREFIX_LOC loc_load.APPROVER_SCHEME_ID_SLOT_ID_SUFFIX_LOC + # => [scheme_slot_id_suffix, scheme_slot_id_prefix, APPROVER_MAP_KEY, PUB_KEY, MSG, MSG, i-1] # Get scheme_id for signer index i-1 from initial storage state. exec.active_account::get_initial_map_item - # => [0, 0, 0, scheme_id, PUB_KEY, MSG, MSG, i-1] + # => [[scheme_id, 0, 0, 0], PUB_KEY, MSG, MSG, i-1] - drop drop drop + movdn.3 drop drop drop # OS => [scheme_id, PUB_KEY, MSG, MSG, i-1] # AS => [SIGNATURE] @@ -305,3 +315,13 @@ pub proc verify_signatures loc_load.SUCCESSFUL_VERIFICATIONS_LOC # => [num_verified_signatures, MSG] end + +#! Builds the storage map key for a signer index. +#! +#! Inputs: [key_index] +#! Outputs: [APPROVER_MAP_KEY] +proc create_approver_map_key + push.0.0.0 movup.3 + # => [[key_index, 0, 0, 0]] + # => [APPROVER_MAP_KEY] +end diff --git a/crates/miden-standards/asm/standards/auth/tx_policy.masm b/crates/miden-standards/asm/standards/auth/tx_policy.masm new file mode 100644 index 0000000000..76da300070 --- /dev/null +++ b/crates/miden-standards/asm/standards/auth/tx_policy.masm @@ -0,0 +1,80 @@ +use miden::protocol::active_account +use miden::protocol::native_account +use miden::protocol::tx + +const ERR_AUTH_PROCEDURE_MUST_BE_CALLED_ALONE = "procedure must be called alone" +const ERR_AUTH_TRANSACTION_MUST_NOT_INCLUDE_INPUT_OR_OUTPUT_NOTES = "transaction must not include input or output notes" + +#! Asserts that exactly one non-auth account procedure was called in the current transaction. +#! +#! Inputs: [] +#! Outputs: [] +#! +#! 
Invocation: exec +@locals(1) # non-auth called proc count +pub proc assert_only_one_non_auth_procedure_called + push.0 + loc_store.0 + # => [] + + exec.active_account::get_num_procedures + # => [num_procedures] + + dup neq.0 + # => [should_continue, num_procedures] + while.true + sub.1 dup + exec.active_account::get_procedure_root dupw + # => [PROC_ROOT, PROC_ROOT] + + exec.native_account::was_procedure_called + # => [was_called, PROC_ROOT] + + if.true + dropw + # => [proc_index] + + # The auth procedure is always at procedure index 0. + dup neq.0 + # => [is_not_auth_proc, proc_index] + + if.true + loc_load.0 add.1 loc_store.0 + # => [proc_index] + end + else + dropw + # => [proc_index] + end + + dup neq.0 + # => [should_continue, proc_index] + end + + drop + # => [] + + loc_load.0 eq.1 + assert.err=ERR_AUTH_PROCEDURE_MUST_BE_CALLED_ALONE + # => [] +end + +#! Asserts that the current transaction does not consume input notes or create output notes. +#! +#! Inputs: [] +#! Outputs: [] +#! +#! Invocation: exec +pub proc assert_no_input_or_output_notes + exec.tx::get_num_input_notes + # => [num_input_notes] + + assertz.err=ERR_AUTH_TRANSACTION_MUST_NOT_INCLUDE_INPUT_OR_OUTPUT_NOTES + # => [] + + exec.tx::get_num_output_notes + # => [num_output_notes] + + assertz.err=ERR_AUTH_TRANSACTION_MUST_NOT_INCLUDE_INPUT_OR_OUTPUT_NOTES + # => [] +end diff --git a/crates/miden-standards/asm/standards/data_structures/array.masm b/crates/miden-standards/asm/standards/data_structures/array.masm index f005008f13..438c76b9e4 100644 --- a/crates/miden-standards/asm/standards/data_structures/array.masm +++ b/crates/miden-standards/asm/standards/data_structures/array.masm @@ -10,29 +10,27 @@ use miden::protocol::active_account use miden::protocol::native_account -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } - # PROCEDURES # ================================================================================================= #! 
Sets a word in the array at the specified index. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, index, VALUE] +#! Inputs: [slot_id_suffix, slot_id_prefix, index, VALUE] #! Outputs: [OLD_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier. +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier. #! - index is the index at which to store the value (0 to 2^64 - 2^32). #! - VALUE is the word to store at the specified index. #! #! Invocation: exec -pub proc set(slot_id_prefix: felt, slot_id_suffix: felt, index: felt, value: BeWord) -> BeWord - # Build KEY = [index, 0, 0, 0] - push.0.0.0 movup.5 - # => [index, 0, 0, 0, slot_id_prefix, slot_id_suffix, VALUE] +pub proc set(slot_id_suffix: felt, slot_id_prefix: felt, index: felt, value: word) -> word + # Build KEY = [0, 0, 0, index] + movup.2 push.0.0.0 + # => [0, 0, 0, index, slot_id_suffix, slot_id_prefix, VALUE] movup.5 movup.5 - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.native_account::set_map_item # => [OLD_VALUE] @@ -40,22 +38,22 @@ end #! Gets a word from the array at the specified index. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, index] +#! Inputs: [slot_id_suffix, slot_id_prefix, index] #! Outputs: [VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier. +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier. #! - index is the index of the element to retrieve (0 to 2^64 - 2^32). #! - VALUE is the word stored at the specified index (zero if not set). #! #! 
Invocation: exec -pub proc get(slot_id_prefix: felt, slot_id_suffix: felt, index: felt) -> BeWord - # Build KEY = [index, 0, 0, 0] - push.0.0.0 movup.5 - # => [index, 0, 0, 0, slot_id_prefix, slot_id_suffix] +pub proc get(slot_id_suffix: felt, slot_id_prefix: felt, index: felt) -> word + # Build KEY = [0, 0, 0, index] + movup.2 push.0.0.0 + # => [0, 0, 0, index, slot_id_suffix, slot_id_prefix] movup.5 movup.5 - # => [slot_id_prefix, slot_id_suffix, KEY] + # => [slot_id_suffix, slot_id_prefix, KEY] exec.active_account::get_map_item # => [VALUE] diff --git a/crates/miden-standards/asm/standards/data_structures/double_word_array.masm b/crates/miden-standards/asm/standards/data_structures/double_word_array.masm index 09e3a1fcd0..a2cf4f23cc 100644 --- a/crates/miden-standards/asm/standards/data_structures/double_word_array.masm +++ b/crates/miden-standards/asm/standards/data_structures/double_word_array.masm @@ -17,8 +17,7 @@ const SLOT_ID_PREFIX_LOC=0 const SLOT_ID_SUFFIX_LOC=1 const INDEX_LOC=2 -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } -type BeDoubleWord = struct @bigendian { +type DoubleWord = struct { a: felt, b: felt, c: felt, @@ -34,11 +33,11 @@ type BeDoubleWord = struct @bigendian { #! Sets a double-word in the array at the specified index. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, index, VALUE_0, VALUE_1] +#! Inputs: [slot_id_suffix, slot_id_prefix, index, VALUE_0, VALUE_1] #! Outputs: [OLD_VALUE_0, OLD_VALUE_1] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier. +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier. #! - index is the index at which to store the value (0 to 2^64 - 2^32). #! - VALUE_0 is the first word to store at the specified index. #! - VALUE_1 is the second word to store at the specified index. @@ -49,38 +48,38 @@ type BeDoubleWord = struct @bigendian { #! 
Invocation: exec @locals(3) pub proc set( - slot_id_prefix: felt, slot_id_suffix: felt, + slot_id_prefix: felt, index: felt, - value: BeDoubleWord -) -> BeDoubleWord + value: DoubleWord +) -> DoubleWord # save inputs to locals for reuse - loc_store.SLOT_ID_PREFIX_LOC loc_store.SLOT_ID_SUFFIX_LOC + loc_store.SLOT_ID_PREFIX_LOC loc_store.INDEX_LOC # => [VALUE_0, VALUE_1] - # Set the first word under key [index, 0, 0, 0]. - push.0.0.0 + # Set the first word under key [0, 0, 0, index]. loc_load.INDEX_LOC - # => [index, 0, 0, 0, VALUE_0, VALUE_1] + push.0.0.0 + # => [0, 0, 0, index, VALUE_0, VALUE_1] - loc_load.SLOT_ID_SUFFIX_LOC loc_load.SLOT_ID_PREFIX_LOC - # => [slot_id_prefix, slot_id_suffix, KEY_0, VALUE_0, VALUE_1] + loc_load.SLOT_ID_SUFFIX_LOC + # => [slot_id_suffix, slot_id_prefix, KEY_0, VALUE_0, VALUE_1] exec.native_account::set_map_item # => [OLD_VALUE_0, VALUE_1] swapw - # Set the second word under key [index, 1, 0, 0]. - push.0.0.1 + # Set the second word under key [0, 0, 1, index]. loc_load.INDEX_LOC - # => [index, 1, 0, 0, VALUE_1, OLD_VALUE_0] + push.1.0.0 + # => [0, 0, 1, index, VALUE_1, OLD_VALUE_0] - loc_load.SLOT_ID_SUFFIX_LOC loc_load.SLOT_ID_PREFIX_LOC - # => [slot_id_prefix, slot_id_suffix, KEY_1, VALUE_1, OLD_VALUE_0] + loc_load.SLOT_ID_SUFFIX_LOC + # => [slot_id_suffix, slot_id_prefix, KEY_1, VALUE_1, OLD_VALUE_0] exec.native_account::set_map_item # => [OLD_VALUE_1, OLD_VALUE_0] @@ -89,44 +88,44 @@ end #! Gets a double-word from the array at the specified index. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, index] +#! Inputs: [slot_id_suffix, slot_id_prefix, index] #! Outputs: [VALUE_0, VALUE_1] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier. +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier. #! - index is the index of the element to retrieve (0 to 2^64 - 2^32). #! - VALUE_0 is the first word stored at the specified index (zero if not set). #! 
- VALUE_1 is the second word stored at the specified index (zero if not set). #! #! Invocation: exec @locals(3) -pub proc get(slot_id_prefix: felt, slot_id_suffix: felt, index: felt) -> BeDoubleWord +pub proc get(slot_id_suffix: felt, slot_id_prefix: felt, index: felt) -> DoubleWord # Save inputs to locals for reuse. - loc_store.SLOT_ID_PREFIX_LOC loc_store.SLOT_ID_SUFFIX_LOC + loc_store.SLOT_ID_PREFIX_LOC loc_store.INDEX_LOC # => [] - # Get the first word from key [index, 0, 0, 0]. - push.0.0.0 + # Get the first word from key [0, 0, 0, index]. loc_load.INDEX_LOC - # => [index, 0, 0, 0] + push.0.0.0 + # => [0, 0, 0, index] - loc_load.SLOT_ID_SUFFIX_LOC loc_load.SLOT_ID_PREFIX_LOC - # => [slot_id_prefix, slot_id_suffix, KEY_0] + loc_load.SLOT_ID_SUFFIX_LOC + # => [slot_id_suffix, slot_id_prefix, KEY_0] exec.active_account::get_map_item # => [VALUE_0] - # Get the second word from key [index, 1, 0, 0]. - push.0.0.1 + # Get the second word from key [0, 0, 1, index]. loc_load.INDEX_LOC - # => [index, 1, 0, 0, VALUE_0] + push.1.0.0 + # => [0, 0, 1, index, VALUE_0] - loc_load.SLOT_ID_SUFFIX_LOC loc_load.SLOT_ID_PREFIX_LOC - # => [slot_id_prefix, slot_id_suffix, KEY_1, VALUE_0] + loc_load.SLOT_ID_SUFFIX_LOC + # => [slot_id_suffix, slot_id_prefix, KEY_1, VALUE_0] exec.active_account::get_map_item swapw diff --git a/crates/miden-standards/asm/standards/faucets/basic_fungible.masm b/crates/miden-standards/asm/standards/faucets/basic_fungible.masm index b1229ff40e..2cff662f6f 100644 --- a/crates/miden-standards/asm/standards/faucets/basic_fungible.masm +++ b/crates/miden-standards/asm/standards/faucets/basic_fungible.masm @@ -1,14 +1,17 @@ # BASIC FUNGIBLE FAUCET CONTRACT # # See the `BasicFungibleFaucet` documentation for details. +# +# Note: This component requires `MintPolicyManager` component to also be present in the account. 
# ================================================================================================= use miden::standards::faucets +use miden::standards::mint_policies::policy_manager # PROCEDURES # ================================================================================================= -#! Distributes freshly minted fungible assets to the provided recipient by creating a note. +#! Mints fungible assets to the provided recipient by creating a note. #! #! Inputs: [amount, tag, note_type, RECIPIENT, pad(9)] #! Outputs: [note_idx, pad(15)] @@ -22,11 +25,19 @@ use miden::standards::faucets #! - note_idx is the index of the created note. #! #! Panics if: -#! - any of the validations in faucets::distribute fail. +#! - active mint policy validation fails. +#! - any of the validations in faucets::mint_and_send fail. #! #! Invocation: call -pub proc distribute - exec.faucets::distribute +pub proc mint_and_send + # TODO: Remove once AccountComponentInterface is refactored + # Keep this procedure digest distinct from network_fungible::mint_and_send. 
+ push.0 drop + + exec.policy_manager::execute_mint_policy + # => [new_amount, new_tag, new_note_type, NEW_RECIPIENT, pad(9)] + + exec.faucets::mint_and_send # => [note_idx, pad(15)] end diff --git a/crates/miden-standards/asm/standards/faucets/mod.masm b/crates/miden-standards/asm/standards/faucets/mod.masm index 5451bb9655..a7679ce69e 100644 --- a/crates/miden-standards/asm/standards/faucets/mod.masm +++ b/crates/miden-standards/asm/standards/faucets/mod.masm @@ -3,11 +3,13 @@ use miden::protocol::active_note use miden::protocol::faucet use miden::protocol::native_account use miden::protocol::output_note +use miden::protocol::asset use ::miden::protocol::asset::FUNGIBLE_ASSET_MAX_AMOUNT # CONSTANTS # ================================================================================================= +const ASSET_PTR=0 const PRIVATE_NOTE=2 # ERRORS @@ -33,7 +35,7 @@ const METADATA_SLOT_LOCAL=0 # Layout: [token_supply, max_supply, decimals, token_symbol] const METADATA_SLOT=word("miden::standards::fungible_faucets::metadata") -#! Distributes freshly minted fungible assets to the provided recipient by creating a note. +#! Mints fungible assets to the provided recipient by creating a note. #! #! Inputs: [amount, tag, note_type, RECIPIENT] #! Outputs: [note_idx] @@ -54,16 +56,16 @@ const METADATA_SLOT=word("miden::standards::fungible_faucets::metadata") #! #! Invocation: exec @locals(4) -pub proc distribute +pub proc mint_and_send # Get the configured max supply and the token supply (= current supply). 
# --------------------------------------------------------------------------------------------- push.METADATA_SLOT[0..2] exec.active_account::get_item - # => [token_symbol, decimals, max_supply, token_supply, amount, tag, note_type, RECIPIENT] + # => [token_supply, max_supply, decimals, token_symbol, amount, tag, note_type, RECIPIENT] # store a copy of the current slot content for the token_supply update later - loc_storew_be.METADATA_SLOT_LOCAL - drop drop + loc_storew_le.METADATA_SLOT_LOCAL + swap movup.2 drop movup.2 drop # => [max_supply, token_supply, amount, tag, note_type, RECIPIENT] # Assert that minting does not violate any supply constraints. @@ -111,43 +113,45 @@ pub proc distribute dup.1 add # => [new_token_supply, amount, tag, note_type, RECIPIENT] - padw loc_loadw_be.METADATA_SLOT_LOCAL - # => [[token_symbol, decimals, max_supply, token_supply], new_token_supply, amount, tag, note_type, RECIPIENT] + padw loc_loadw_le.METADATA_SLOT_LOCAL + # => [[token_supply, max_supply, decimals, token_symbol], new_token_supply, amount, tag, note_type, RECIPIENT] - movup.3 drop - # => [[token_symbol, decimals, max_supply, new_token_supply], amount, tag, note_type, RECIPIENT] + drop movup.3 + # => [[new_token_supply, max_supply, decimals, token_symbol], amount, tag, note_type, RECIPIENT] # update the metadata slot with the new supply push.METADATA_SLOT[0..2] exec.native_account::set_item dropw # => [amount, tag, note_type, RECIPIENT] + # Create a new note. + # --------------------------------------------------------------------------------------------- + + movdn.6 exec.output_note::create + # => [note_idx, amount] + + dup movup.2 + # => [amount, note_idx, note_idx] + # Mint the asset. 
# --------------------------------------------------------------------------------------------- # creating the asset exec.faucet::create_fungible_asset - # => [ASSET, tag, note_type, RECIPIENT] + # => [ASSET_KEY, ASSET_VALUE, note_idx, note_idx] + + dupw.1 dupw.1 + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY, ASSET_VALUE, note_idx, note_idx] # mint the asset; this is needed to satisfy asset preservation logic. # this ensures that the asset's faucet ID matches the native account's ID. # this is ensured because create_fungible_asset creates the asset with the native account's ID exec.faucet::mint - # => [ASSET, tag, note_type, RECIPIENT] + dropw + # => [ASSET_KEY, ASSET_VALUE, note_idx, note_idx] - movdn.9 movdn.9 movdn.9 movdn.9 - # => [tag, note_type, RECIPIENT, ASSET] - - # Create a new note with the asset. + # Add the asset to the note. # --------------------------------------------------------------------------------------------- - # create a note - exec.output_note::create - # => [note_idx, ASSET] - - # load the ASSET and add it to the note - dup movdn.5 movdn.5 - # => [ASSET, note_idx, note_idx] - exec.output_note::add_asset # => [note_idx] end @@ -175,51 +179,47 @@ pub proc burn # --------------------------------------------------------------------------------------------- # this will fail if not called from a note context. 
- push.0 exec.active_note::get_assets + push.ASSET_PTR exec.active_note::get_assets # => [num_assets, dest_ptr, pad(16)] # Verify we have exactly one asset assert.err=ERR_BASIC_FUNGIBLE_BURN_WRONG_NUMBER_OF_ASSETS # => [dest_ptr, pad(16)] - mem_loadw_be - # => [ASSET, pad(16)] - # => [[faucet_id_prefix, faucet_id_suffix, 0, amount], pad(16)] + exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, pad(16)] # Burn the asset from the transaction vault # --------------------------------------------------------------------------------------------- - dup.3 movdn.4 - # => [ASSET, amount, pad(16)] + exec.asset::fungible_to_amount movdn.8 + # => [ASSET_KEY, ASSET_VALUE, amount, pad(16)] # burn the asset # this ensures we only burn assets that were issued by this faucet (which implies they are # fungible) - exec.faucet::burn dropw + exec.faucet::burn # => [amount, pad(16)] # Subtract burnt amount from current token_supply in storage. # --------------------------------------------------------------------------------------------- push.METADATA_SLOT[0..2] exec.active_account::get_item - # => [token_symbol, decimals, max_supply, token_supply, amount, pad(16)] + # => [token_supply, max_supply, decimals, token_symbol, amount, pad(16)] - dup.4 dup.4 - # => [token_supply, amount, token_symbol, decimals, max_supply, token_supply, amount, pad(16)] + dup.4 dup.1 + # => [token_supply, amount, token_supply, max_supply, decimals, token_symbol, amount, pad(16)] # assert that amount <= token_supply lte assert.err=ERR_FAUCET_BURN_AMOUNT_EXCEEDS_TOKEN_SUPPLY - # => [token_symbol, decimals, max_supply, token_supply, amount, pad(16)] + # => [token_supply, max_supply, decimals, token_symbol, amount, pad(16)] - movup.3 movup.4 - # => [amount, token_supply, token_symbol, decimals, max_supply, pad(16)] + movup.4 + # => [amount, token_supply, max_supply, decimals, token_symbol, pad(16)] # compute new_token_supply = token_supply - amount sub - # => [new_token_supply, token_symbol, decimals, max_supply, 
pad(16)] - - movdn.3 - # => [token_symbol, decimals, max_supply, new_token_supply, pad(16)] + # => [new_token_supply, max_supply, decimals, token_symbol, pad(16)] # update the metadata slot with the new supply push.METADATA_SLOT[0..2] exec.native_account::set_item dropw diff --git a/crates/miden-standards/asm/standards/faucets/network_fungible.masm b/crates/miden-standards/asm/standards/faucets/network_fungible.masm index 5f405db8fe..9f3c58887d 100644 --- a/crates/miden-standards/asm/standards/faucets/network_fungible.masm +++ b/crates/miden-standards/asm/standards/faucets/network_fungible.masm @@ -1,57 +1,22 @@ -use miden::protocol::active_note +# NETWORK FUNGIBLE FAUCET CONTRACT +# +# Note: This component requires `MintPolicyManager` component to also be present in the account. +# ================================================================================================= + use miden::standards::faucets -use miden::standards::access::ownable +use miden::standards::mint_policies::policy_manager # PUBLIC INTERFACE # ================================================================================================ -# OWNER MANAGEMENT -# ------------------------------------------------------------------------------------------------ - -#! Returns the owner AccountId. -#! -#! Inputs: [] -#! Outputs: [owner_prefix, owner_suffix, pad(14)] -#! -#! Invocation: call -pub use ownable::get_owner - -#! Transfers ownership to a new account. -#! -#! Can only be called by the current owner. -#! -#! Inputs: [new_owner_prefix, new_owner_suffix, pad(14)] -#! Outputs: [pad(16)] -#! -#! Where: -#! - new_owner_{prefix, suffix} are the prefix and suffix felts of the new owner AccountId. -#! -#! Panics if: -#! - the note sender is not the owner. -#! -#! Invocation: call -pub use ownable::transfer_ownership - -#! Renounces ownership, leaving the component without an owner. -#! -#! Can only be called by the current owner. -#! -#! Inputs: [pad(16)] -#! Outputs: [pad(16)] -#! -#! 
Panics if: -#! - the note sender is not the owner. -#! -#! Invocation: call -pub use ownable::renounce_ownership - -# ASSET DISTRIBUTION +# ASSET MINTING # ------------------------------------------------------------------------------------------------ -#! Distributes freshly minted fungible assets to the provided recipient. +#! Mints fungible assets to the provided recipient by creating a note. #! -#! This procedure first checks if the note sender is the owner of the faucet, and then -#! mints the asset and creates an output note with that asset for the recipient. +#! This procedure first executes the active mint policy configured via +#! `active_policy_proc_root`, and then mints the asset and creates an output note +#! with that asset for the recipient. #! #! Inputs: [amount, tag, note_type, RECIPIENT, pad(9)] #! Outputs: [note_idx, pad(15)] @@ -64,15 +29,15 @@ pub use ownable::renounce_ownership #! - note_idx is the index of the created note. #! #! Panics if: -#! - the note sender is not the owner of this faucet. -#! - any of the validations in faucets::distribute fail. +#! - active mint policy validation fails. +#! - any of the validations in faucets::mint_and_send fail. #! #! 
Invocation: call -pub proc distribute - exec.ownable::verify_owner - # => [amount, tag, aux, note_type, execution_hint, RECIPIENT, pad(7)] +pub proc mint_and_send + exec.policy_manager::execute_mint_policy + # => [new_amount, new_tag, new_note_type, NEW_RECIPIENT, pad(9)] - exec.faucets::distribute + exec.faucets::mint_and_send # => [note_idx, pad(15)] end diff --git a/crates/miden-standards/asm/standards/metadata/storage_schema.masm b/crates/miden-standards/asm/standards/metadata/storage_schema.masm index fe61927c30..546557cf8a 100644 --- a/crates/miden-standards/asm/standards/metadata/storage_schema.masm +++ b/crates/miden-standards/asm/standards/metadata/storage_schema.masm @@ -8,7 +8,7 @@ use miden::protocol::active_account # ================================================================================================= # The slot in this component's storage layout where the account storage schema commitment is stored -const SCHEMA_COMMITMENT_SLOT = word("miden::standards::metadata::storage_schema") +const SCHEMA_COMMITMENT_SLOT = word("miden::standards::metadata::storage_schema::commitment") pub proc get_schema_commitment dropw diff --git a/crates/miden-standards/asm/standards/mint_policies/auth_controlled.masm b/crates/miden-standards/asm/standards/mint_policies/auth_controlled.masm new file mode 100644 index 0000000000..e75250cb72 --- /dev/null +++ b/crates/miden-standards/asm/standards/mint_policies/auth_controlled.masm @@ -0,0 +1,12 @@ +# POLICY PROCEDURES +# ================================================================================================ + +#! Dummy mint predicate to allow all mints. +#! +#! Inputs: [amount, tag, note_type, RECIPIENT, pad(9)] +#! Outputs: [amount, tag, note_type, RECIPIENT, pad(9)] +#! Invocation: dynexec +pub proc allow_all + # Dummy predicate, no checks yet. 
+ push.0 drop +end diff --git a/crates/miden-standards/asm/standards/mint_policies/owner_controlled.masm b/crates/miden-standards/asm/standards/mint_policies/owner_controlled.masm new file mode 100644 index 0000000000..9b93582d8d --- /dev/null +++ b/crates/miden-standards/asm/standards/mint_policies/owner_controlled.masm @@ -0,0 +1,18 @@ +use miden::standards::access::ownable2step + +# POLICY PROCEDURES +# ================================================================================================ + +#! Owner-only mint predicate. +#! +#! Inputs: [amount, tag, note_type, RECIPIENT, pad(9)] +#! Outputs: [amount, tag, note_type, RECIPIENT, pad(9)] +#! +#! Panics if: +#! - note sender is not owner. +#! +#! Invocation: dynexec +pub proc owner_only + exec.ownable2step::assert_sender_is_owner + # => [amount, tag, note_type, RECIPIENT, pad(9)] +end diff --git a/crates/miden-standards/asm/standards/mint_policies/policy_manager.masm b/crates/miden-standards/asm/standards/mint_policies/policy_manager.masm new file mode 100644 index 0000000000..2d8842e80b --- /dev/null +++ b/crates/miden-standards/asm/standards/mint_policies/policy_manager.masm @@ -0,0 +1,207 @@ +use miden::core::word +use miden::protocol::active_account +use miden::protocol::native_account +use miden::standards::access::ownable2step + +# DEPENDENCY NOTE +# This manager supports two policy-authority modes: +# - 0: auth_controlled: no Ownable2Step dependency. +# - 1: owner_controlled: requires Ownable2Step component +# (`ownable2step::assert_sender_is_owner`) for `set_mint_policy`. + +# CONSTANTS +# ================================================================================================ + +# Active policy root slot. +# Layout: [PROC_ROOT] +const ACTIVE_POLICY_PROC_ROOT_SLOT=word("miden::standards::mint_policy_manager::active_policy_proc_root") + +# Allowlist map slot for policy roots. 
+# Map entries: [PROC_ROOT] -> [1, 0, 0, 0] +# This slot ensures the policy manager runs only allowed mint-policy roots, not arbitrary procedures. +# A root that is not in this allowlist is always rejected, even if it exists in account code. +# This prevents arbitrary procedure root execution through policy selection. +# Component constructors initialize this map with known allowed mint-policy roots by default. +# `set_mint_policy` and `execute_mint_policy` checks this. +const ALLOWED_POLICY_PROC_ROOTS_SLOT=word("miden::standards::mint_policy_manager::allowed_policy_proc_roots") + +# Policy authority slot. +# Layout: [policy_authority, 0, 0, 0] +# - POLICY_AUTHORITY = 0: policy authority rely on `auth_controlled`. +# - POLICY_AUTHORITY = 1: `set_mint_policy` requires Ownable2Step owner check. +const POLICY_AUTHORITY_SLOT=word("miden::standards::mint_policy_manager::policy_authority") +const POLICY_AUTHORITY_OWNER_CONTROLLED=1 + +# Local memory pointer used to pass a policy root to `dynexec`. +const MINT_POLICY_PROC_ROOT_PTR=0 + +# ERRORS +# ================================================================================================ + +const ERR_MINT_POLICY_ROOT_IS_ZERO="mint policy root is zero" +const ERR_MINT_POLICY_ROOT_NOT_IN_ACCOUNT="mint policy root is not a procedure of this account" +const ERR_MINT_POLICY_ROOT_NOT_ALLOWED="mint policy root is not allowed" + +# INTERNAL PROCEDURES +# ================================================================================================ + +#! Reads active mint policy root from storage. +#! +#! Inputs: [] +#! Outputs: [MINT_POLICY_ROOT] +#! +#! Invocation: exec +proc get_mint_policy_root + push.ACTIVE_POLICY_PROC_ROOT_SLOT[0..2] exec.active_account::get_item + # => [MINT_POLICY_ROOT] +end + +#! Validates policy root before use. +#! +#! Inputs: [MINT_POLICY_ROOT] +#! Outputs: [MINT_POLICY_ROOT] +#! +#! Panics if: +#! - policy root is zero. +#! - policy root is not present in this account's procedures. +#! +#! 
Invocation: exec +proc assert_existing_policy_root + exec.word::testz + assertz.err=ERR_MINT_POLICY_ROOT_IS_ZERO + # => [MINT_POLICY_ROOT] + + dupw exec.active_account::has_procedure + assert.err=ERR_MINT_POLICY_ROOT_NOT_IN_ACCOUNT + # => [MINT_POLICY_ROOT] +end + +#! Validates that the policy root is one of the allowed policy roots configured for this account. +#! +#! Inputs: [MINT_POLICY_ROOT] +#! Outputs: [MINT_POLICY_ROOT] +#! +#! Panics if: +#! - policy root is not in the allowed policy roots map. +#! +#! Invocation: exec +proc assert_allowed_policy_root + dupw + push.ALLOWED_POLICY_PROC_ROOTS_SLOT[0..2] + exec.active_account::get_map_item + # => [ALLOWED_FLAG, MINT_POLICY_ROOT] + + exec.word::eqz + assertz.err=ERR_MINT_POLICY_ROOT_NOT_ALLOWED + # => [MINT_POLICY_ROOT] +end + +#! Reads policy authority mode. +#! - 0 = `auth_controlled` +#! - 1 = `owner_controlled` +#! +#! Inputs: [] +#! Outputs: [policy_authority] +#! +#! Invocation: exec +proc get_policy_authority + push.POLICY_AUTHORITY_SLOT[0..2] exec.active_account::get_item + # => [policy_authority, 0, 0, 0] + + movdn.3 + # => [0, 0, 0, policy_authority] + + drop drop drop + # => [policy_authority] +end + +#! Authorizes policy update based on policy authority mode. +#! +#! Inputs: [NEW_POLICY_ROOT, pad(12)] +#! Outputs: [NEW_POLICY_ROOT, pad(12)] +#! +#! Panics if: +#! - POLICY_AUTHORITY = 1 and the sender is not owner. +#! +#! Invocation: exec +proc assert_can_set_mint_policy + exec.get_policy_authority + # => [policy_authority, NEW_POLICY_ROOT, pad(12)] + + eq.POLICY_AUTHORITY_OWNER_CONTROLLED + if.true + exec.ownable2step::assert_sender_is_owner + # => [NEW_POLICY_ROOT, pad(12)] + end +end + +# PUBLIC INTERFACE +# ================================================================================================ + +#! Executes active mint policy by dynamic execution. +#! +#! Inputs: [amount, tag, note_type, RECIPIENT, pad(9)] +#! Outputs: [amount, tag, note_type, RECIPIENT, pad(9)] +#! +#! Panics if: +#! 
- mint policy root is invalid. +#! - active policy predicate fails. +#! +#! Invocation: exec +@locals(4) +pub proc execute_mint_policy + exec.get_mint_policy_root + # => [MINT_POLICY_ROOT, amount, tag, note_type, RECIPIENT, pad(9)] + + exec.assert_existing_policy_root + # => [MINT_POLICY_ROOT, amount, tag, note_type, RECIPIENT, pad(9)] + + exec.assert_allowed_policy_root + # => [MINT_POLICY_ROOT, amount, tag, note_type, RECIPIENT, pad(9)] + + loc_storew_le.MINT_POLICY_PROC_ROOT_PTR dropw + # => [amount, tag, note_type, RECIPIENT, pad(9)] + + locaddr.MINT_POLICY_PROC_ROOT_PTR + # => [policy_root_ptr, amount, tag, note_type, RECIPIENT, pad(9)] + + dynexec + # => [amount, tag, note_type, RECIPIENT, pad(9)] +end + +#! Returns active mint policy root. +#! +#! Inputs: [pad(16)] +#! Outputs: [MINT_POLICY_ROOT, pad(12)] +#! +#! Invocation: call +pub proc get_mint_policy + exec.get_mint_policy_root + # => [MINT_POLICY_ROOT, pad(12)] +end + +#! Sets active mint policy root. +#! +#! Inputs: [NEW_POLICY_ROOT, pad(12)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! - POLICY_AUTHORITY = 1 and the sender is not owner. +#! - NEW_POLICY_ROOT is zero. +#! - NEW_POLICY_ROOT is not a procedure of this account. +#! - NEW_POLICY_ROOT is not in the allowed roots map. +#! +#! 
Invocation: call +pub proc set_mint_policy + exec.assert_can_set_mint_policy + # => [NEW_POLICY_ROOT, pad(12)] + + exec.assert_existing_policy_root + # => [NEW_POLICY_ROOT, pad(12)] + + exec.assert_allowed_policy_root + # => [NEW_POLICY_ROOT, pad(12)] + + push.ACTIVE_POLICY_PROC_ROOT_SLOT[0..2] exec.native_account::set_item dropw + # => [pad(16)] +end diff --git a/crates/miden-standards/asm/standards/note_tag/mod.masm b/crates/miden-standards/asm/standards/note_tag/mod.masm index 4d476ae54a..83568c66ed 100644 --- a/crates/miden-standards/asm/standards/note_tag/mod.masm +++ b/crates/miden-standards/asm/standards/note_tag/mod.masm @@ -72,25 +72,25 @@ pub proc create_custom_account_target # since u32shl panics for a 32 shift, we need to use u64::shl in case tag_len is 0 # push u32::MAX as a u64 (hi limb set to zero) - push.0xffffffff push.0 - # => [0, u32::MAX, tag_len, account_id_prefix] + push.0 push.0xffffffff + # => [u32::MAX, 0, tag_len, account_id_prefix] # compute "number of bits in u32" - tag_len push.32 movup.3 sub - # => [shift_by, 0, u32::MAX, account_id_prefix] + # => [shift_by, u32::MAX, 0, account_id_prefix] exec.u64::shl - # => [bit_mask_hi, bit_mask_lo, account_id_prefix] + # => [bit_mask_lo, bit_mask_hi, account_id_prefix] # the mask we need is the lo limb so discard the hi limb - drop + swap drop # => [bit_mask, account_id_prefix] swap u32split - # => [account_id_prefix_hi, account_id_prefix_lo, bit_mask] + # => [account_id_prefix_lo, account_id_prefix_hi, bit_mask] # discard the lo part of the ID prefix - swap drop + drop # => [account_id_prefix_hi, bit_mask] u32and diff --git a/crates/miden-standards/asm/standards/notes/mint.masm b/crates/miden-standards/asm/standards/notes/mint.masm index 65e8a82a90..ca287daf48 100644 --- a/crates/miden-standards/asm/standards/notes/mint.masm +++ b/crates/miden-standards/asm/standards/notes/mint.masm @@ -24,11 +24,12 @@ const OUTPUT_PUBLIC_NOTE_STORAGE_ADDR=16 const 
ERR_MINT_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS="MINT script expects exactly 12 storage items for private or 16+ storage items for public output notes" -#! Network Faucet MINT script: mints assets by calling the network faucet's distribute function. +#! Network Faucet MINT script: mints assets by calling the network faucet's mint_and_send +#! function. #! This note is intended to be executed against a network fungible faucet account. #! #! Requires that the account exposes: -#! - miden::standards::faucets::network_fungible::distribute procedure. +#! - miden::standards::faucets::network_fungible::mint_and_send procedure. #! #! Inputs: [ARGS, pad(12)] #! Outputs: [pad(16)] @@ -56,7 +57,7 @@ const ERR_MINT_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS="MINT script expects exactly 1 #! The number of output note storage items = num_mint_note_storage_items - 16 #! #! Panics if: -#! - account does not expose distribute procedure. +#! - account does not expose mint_and_send procedure. #! - the number of storage items is not exactly 12 for private or less than 16 for public output notes. @note_script pub proc main @@ -80,10 +81,10 @@ pub proc main movdn.9 drop # => [EMPTY_WORD, EMPTY_WORD, num_storage_items, pad(8)] - mem_loadw_be.8 + mem_loadw_le.8 # => [SCRIPT_ROOT, EMPTY_WORD, num_storage_items, pad(8)] - swapw mem_loadw_be.12 + swapw mem_loadw_le.12 # => [SERIAL_NUM, SCRIPT_ROOT, num_storage_items, pad(8)] # compute variable length note storage for the output note @@ -109,7 +110,7 @@ pub proc main drop # => [pad(16)] - mem_loadw_be.8 + mem_loadw_le.8 # => [RECIPIENT, pad(12)] # push note_type, and load tag and amount @@ -119,12 +120,12 @@ pub proc main end # => [amount, tag, note_type, RECIPIENT, pad(12)] - # distribute expects 9 pad elements, returns 15 and 12 are provided here. + # mint_and_send expects 9 pad elements, returns 15 and 12 are provided here. 
# so the total number of pads after calling is 12 + (15-9) = 18 - call.network_faucet::distribute + call.network_faucet::mint_and_send # => [note_idx, pad(18))] - padw mem_loadw_be.ATTACHMENT_ADDRESS + padw mem_loadw_le.ATTACHMENT_ADDRESS # => [ATTACHMENT, note_idx, pad(18))] mem_load.ATTACHMENT_KIND_ADDRESS diff --git a/crates/miden-standards/asm/standards/notes/p2id.masm b/crates/miden-standards/asm/standards/notes/p2id.masm index d6a11dd53c..99abab6204 100644 --- a/crates/miden-standards/asm/standards/notes/p2id.masm +++ b/crates/miden-standards/asm/standards/notes/p2id.masm @@ -12,6 +12,16 @@ const ERR_P2ID_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS="P2ID note expects exactly 2 n const ERR_P2ID_TARGET_ACCT_MISMATCH="P2ID's target account address and transaction address do not match" +# CONSTANTS +# ================================================================================================= + +const STORAGE_PTR = 0 +const TARGET_ACCOUNT_ID_SUFFIX_PTR = STORAGE_PTR +const TARGET_ACCOUNT_ID_PREFIX_PTR = STORAGE_PTR + 1 + +# PROCEDURES +# ================================================================================================= + #! Pay-to-ID script: adds all assets from the note to the account, assuming ID of the account #! matches target account ID specified by the note storage. #! 
@@ -33,19 +43,21 @@ const ERR_P2ID_TARGET_ACCT_MISMATCH="P2ID's target account address and transacti @note_script pub proc main # store the note storage to memory starting at address 0 - padw push.0 exec.active_note::get_storage - # => [num_storage_items, storage_ptr, EMPTY_WORD] + push.STORAGE_PTR exec.active_note::get_storage + # => [num_storage_items, storage_ptr] # make sure the number of storage items is 2 eq.2 assert.err=ERR_P2ID_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS - # => [storage_ptr, EMPTY_WORD] + # => [storage_ptr] # read the target account ID from the note storage - mem_loadw_be drop drop - # => [target_account_id_prefix, target_account_id_suffix] + drop + mem_load.TARGET_ACCOUNT_ID_PREFIX_PTR + mem_load.TARGET_ACCOUNT_ID_SUFFIX_PTR + # => [target_account_id_suffix, target_account_id_prefix] exec.active_account::get_id - # => [account_id_prefix, account_id_suffix, target_account_id_prefix, target_account_id_suffix, ...] + # => [account_id_suffix, account_id_prefix, target_account_id_suffix, target_account_id_prefix] # ensure account_id = target_account_id, fails otherwise exec.account_id::is_equal assert.err=ERR_P2ID_TARGET_ACCT_MISMATCH @@ -63,12 +75,12 @@ end #! - Obtaining the note script root via procref #! - Building the recipient and creating the note #! -#! Inputs: [target_id_prefix, target_id_suffix, tag, note_type, SERIAL_NUM] +#! Inputs: [target_id_suffix, target_id_prefix, tag, note_type, SERIAL_NUM] #! Outputs: [note_idx] #! #! Where: -#! - target_id_prefix is the prefix felt of the target account ID. #! - target_id_suffix is the suffix felt of the target account ID. +#! - target_id_prefix is the prefix felt of the target account ID. #! - tag is the note tag to be included in the note. #! - note_type is the storage type of the note (1 = public, 2 = private). #! - SERIAL_NUM is the serial number of the note (4 elements). @@ -77,9 +89,9 @@ end #! 
Invocation: exec @locals(2) pub proc new - # => [target_id_prefix, target_id_suffix, tag, note_type, SERIAL_NUM] + # => [target_id_suffix, target_id_prefix, tag, note_type, SERIAL_NUM] - loc_store.1 loc_store.0 + loc_store.TARGET_ACCOUNT_ID_SUFFIX_PTR loc_store.TARGET_ACCOUNT_ID_PREFIX_PTR # => [tag, note_type, SERIAL_NUM] movdn.5 movdn.5 @@ -91,7 +103,7 @@ pub proc new swapw # => [SERIAL_NUM, SCRIPT_ROOT, tag, note_type] - push.2 locaddr.0 + push.2 locaddr.STORAGE_PTR # => [storage_ptr, num_storage_items=2, SERIAL_NUM, SCRIPT_ROOT, tag, note_type] exec.note::build_recipient diff --git a/crates/miden-standards/asm/standards/notes/p2ide.masm b/crates/miden-standards/asm/standards/notes/p2ide.masm index 4e2ff77e9f..f476232e06 100644 --- a/crates/miden-standards/asm/standards/notes/p2ide.masm +++ b/crates/miden-standards/asm/standards/notes/p2ide.masm @@ -24,7 +24,7 @@ const ERR_P2IDE_TIMELOCK_HEIGHT_NOT_REACHED="failed to consume P2IDE note becaus #! #! Inputs: [current_block_height, timelock_block_height] #! Outputs: [current_block_height] -proc verify_unlocked +proc assert_unlocked dup movdn.2 # => [current_block_height, timelock_block_height, current_block_height] @@ -37,7 +37,7 @@ end #! #! Checks if P2IDE reclaim is enabled and if true, if reclaim height has been reached. #! -#! Inputs: [account_id_prefix, account_id_suffix, current_block_height, reclaim_block_height] +#! Inputs: [account_id_suffix, account_id_prefix, current_block_height, reclaim_block_height] #! Outputs: [] #! #! 
Panics if: @@ -47,18 +47,18 @@ end proc reclaim_note # check that the reclaim of the active note is enabled movup.3 dup neq.0 assert.err=ERR_P2IDE_RECLAIM_DISABLED - # => [reclaim_block_height, account_id_prefix, account_id_suffix, current_block_height] + # => [reclaim_block_height, account_id_suffix, account_id_prefix, current_block_height] # now check that sender is allowed to reclaim, reclaim block height <= current block height movup.3 - # => [current_block_height, reclaim_block_height, account_id_prefix, account_id_suffix] + # => [current_block_height, reclaim_block_height, account_id_suffix, account_id_prefix] lte assert.err=ERR_P2IDE_RECLAIM_HEIGHT_NOT_REACHED - # => [account_id_prefix, account_id_suffix] + # => [account_id_suffix, account_id_prefix] # if active account is not the target, we need to ensure it is the sender exec.active_note::get_sender - # => [sender_account_id_prefix, sender_account_id_suffix, account_id_prefix, account_id_suffix] + # => [sender_account_id_suffix, sender_account_id_prefix, account_id_suffix, account_id_prefix] # ensure active account ID = sender account ID exec.account_id::is_equal assert.err=ERR_P2IDE_RECLAIM_ACCT_IS_NOT_SENDER @@ -110,25 +110,28 @@ pub proc main eq.4 assert.err=ERR_P2IDE_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS # => [storage_ptr] - # read the reclaim block height, timelock_block_height, and target account ID from the note storage - mem_loadw_be - # => [timelock_block_height, reclaim_block_height, target_account_id_prefix, target_account_id_suffix] + # read the target account ID, reclaim block height, and timelock_block_height from the note storage + mem_loadw_le + # => [target_account_id_suffix, target_account_id_prefix, reclaim_block_height, timelock_block_height] + + movup.3 + # => [timelock_block_height, target_account_id_suffix, target_account_id_prefix, reclaim_block_height] # read the current block number exec.tx::get_block_number - # => [current_block_height, timelock_block_height, reclaim_block_height, 
target_account_id_prefix, target_account_id_suffix] + # => [current_block_height, timelock_block_height, target_account_id_suffix, target_account_id_prefix, reclaim_block_height] - # fails if note is locked - exec.verify_unlocked - # => [current_block_height, reclaim_block_height, target_account_id_prefix, target_account_id_suffix] + # assert note is unlocked + exec.assert_unlocked + # => [current_block_height, target_account_id_suffix, target_account_id_prefix, reclaim_block_height] # get active account id exec.active_account::get_id dup.1 dup.1 - # => [account_id_prefix, account_id_suffix, account_id_prefix, account_id_suffix, current_block_height, reclaim_block_height, target_account_id_prefix, target_account_id_suffix] + # => [account_id_suffix, account_id_prefix, account_id_suffix, account_id_prefix, current_block_height, target_account_id_suffix, target_account_id_prefix, reclaim_block_height] # determine if the active account is the target account - movup.7 movup.7 exec.account_id::is_equal - # => [is_target, account_id_prefix, account_id_suffix, current_block_height, reclaim_block_height] + movup.6 movup.6 exec.account_id::is_equal + # => [is_target, account_id_suffix, account_id_prefix, current_block_height, reclaim_block_height] if.true # we can safely consume the note since the active account is the target of the note diff --git a/crates/miden-standards/asm/standards/notes/swap.masm b/crates/miden-standards/asm/standards/notes/swap.masm index 48f86cb97d..cedb7a6236 100644 --- a/crates/miden-standards/asm/standards/notes/swap.masm +++ b/crates/miden-standards/asm/standards/notes/swap.masm @@ -1,19 +1,21 @@ use miden::protocol::active_note +use miden::protocol::asset use miden::protocol::output_note use miden::standards::wallets::basic->wallet # CONSTANTS # ================================================================================================= -const SWAP_NOTE_NUM_STORAGE_ITEMS=16 +const SWAP_NOTE_NUM_STORAGE_ITEMS=20 -const 
PAYBACK_NOTE_TYPE_ADDRESS=0 -const PAYBACK_NOTE_TAG_ADDRESS=1 -const ATTACHMENT_KIND_ADDRESS=2 -const ATTACHMENT_SCHEME_ADDRESS=3 -const ATTACHMENT_ADDRESS=4 -const REQUESTED_ASSET_ADDRESS=8 -const PAYBACK_RECIPIENT_ADDRESS=12 +const PAYBACK_NOTE_TYPE_PTR=0 +const PAYBACK_NOTE_TAG_PTR=1 +const ATTACHMENT_KIND_PTR=2 +const ATTACHMENT_SCHEME_PTR=3 +const ATTACHMENT_PTR=4 +const REQUESTED_ASSET_PTR=8 +const PAYBACK_RECIPIENT_PTR=16 +const ASSET_PTR=20 # ERRORS # ================================================================================================= @@ -23,7 +25,7 @@ const ERR_SWAP_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS="SWAP script expects exactly 1 const ERR_SWAP_WRONG_NUMBER_OF_ASSETS="SWAP script requires exactly 1 note asset" #! Swap script: adds an asset from the note into consumers account and -#! creates a note consumable by note issuer containing requested ASSET. +#! creates a note consumable by note issuer containing requested asset. #! #! Requires that the account exposes: #! - miden::standards::wallets::basic::receive_asset procedure. @@ -38,7 +40,8 @@ const ERR_SWAP_WRONG_NUMBER_OF_ASSETS="SWAP script requires exactly 1 note asset #! - attachment_kind #! - attachment_scheme #! - ATTACHMENT -#! - REQUESTED_ASSET +#! - REQUESTED_ASSET_KEY +#! - REQUESTED_ASSET_VALUE #! - PAYBACK_RECIPIENT #! #! 
Panics if: @@ -66,62 +69,55 @@ pub proc main drop # => [] - mem_loadw_be.REQUESTED_ASSET_ADDRESS - # => [REQUESTED_ASSET] - - padw mem_loadw_be.PAYBACK_RECIPIENT_ADDRESS - # => [PAYBACK_NOTE_RECIPIENT, REQUESTED_ASSET] + padw mem_loadw_le.PAYBACK_RECIPIENT_PTR + # => [PAYBACK_NOTE_RECIPIENT] # load payback P2ID details - mem_load.PAYBACK_NOTE_TYPE_ADDRESS - mem_load.PAYBACK_NOTE_TAG_ADDRESS - # => [tag, note_type, PAYBACK_NOTE_RECIPIENT, REQUESTED_ASSET] + mem_load.PAYBACK_NOTE_TYPE_PTR + mem_load.PAYBACK_NOTE_TAG_PTR + # => [tag, note_type, PAYBACK_NOTE_RECIPIENT] # create payback P2ID note exec.output_note::create - # => [note_idx, REQUESTED_ASSET] + # => [note_idx] - movdn.4 - # => [REQUESTED_ASSET, note_idx] + padw push.0.0.0 dup.7 + # => [note_idx, pad(7), note_idx] - # padding stack with 11 zeros - repeat.11 - push.0 - movdn.5 - end - # => [REQUESTED_ASSET, note_idx, pad(11)] + push.REQUESTED_ASSET_PTR exec.asset::load + # => [REQUESTED_ASSET_KEY, REQUESTED_ASSET_VALUE, note_idx, pad(7), note_idx] # move asset to the note call.wallet::move_asset_to_note - # => [REQUESTED_ASSET, note_idx, pad(11)] + # => [pad(16), note_idx] dropw - # => [note_idx, pad(11)] + mem_loadw_le.ATTACHMENT_PTR + # => [ATTACHMENT, pad(8), note_idx] - mem_loadw_be.ATTACHMENT_ADDRESS - mem_load.ATTACHMENT_KIND_ADDRESS - mem_load.ATTACHMENT_SCHEME_ADDRESS - movup.6 - # => [note_idx, attachment_scheme, attachment_kind, ATTACHMENT] + mem_load.ATTACHMENT_KIND_PTR + mem_load.ATTACHMENT_SCHEME_PTR + movup.14 + # => [note_idx, attachment_scheme, attachment_kind, ATTACHMENT, pad(8)] exec.output_note::set_attachment - # => [pad(12)] + # => [pad(8)] # --- move assets from the SWAP note into the account ------------------------- - # store the number of note assets to memory starting at address 0 - push.0 exec.active_note::get_assets - # => [num_assets, ptr, pad(12)] + # store the number of note assets to memory starting at address ASSET_PTR + push.ASSET_PTR exec.active_note::get_assets + # => 
[num_assets, asset_ptr, pad(8)] # make sure the number of assets is 1 assert.err=ERR_SWAP_WRONG_NUMBER_OF_ASSETS - # => [ptr, pad(12)] + # => [asset_ptr, pad(8)] - # load the ASSET - mem_loadw_be - # => [ASSET, pad(12)] + # load asset + exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, pad(8)] - # add the ASSET to the account + # add the asset to the account call.wallet::receive_asset # => [pad(16)] diff --git a/crates/miden-standards/asm/standards/wallets/basic.masm b/crates/miden-standards/asm/standards/wallets/basic.masm index 837803c09c..ad06b2b1a9 100644 --- a/crates/miden-standards/asm/standards/wallets/basic.masm +++ b/crates/miden-standards/asm/standards/wallets/basic.masm @@ -1,3 +1,5 @@ +use ::miden::protocol::asset::ASSET_VALUE_MEMORY_OFFSET +use ::miden::protocol::asset::ASSET_SIZE use miden::protocol::native_account use miden::protocol::output_note use miden::protocol::active_note @@ -8,11 +10,12 @@ const PUBLIC_NOTE=1 #! Adds the provided asset to the active account. #! -#! Inputs: [ASSET, pad(12)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] #! Outputs: [pad(16)] #! #! Where: -#! - ASSET is the asset to be received, can be fungible or non-fungible +#! - ASSET_KEY is the vault key of the received asset. +#! - ASSET_VALUE is the value of the received asset. #! #! Panics if: #! - the same non-fungible asset already exists in the account. @@ -22,7 +25,7 @@ const PUBLIC_NOTE=1 #! Invocation: call pub proc receive_asset exec.native_account::add_asset - # => [ASSET', pad(12)] + # => [ASSET_VALUE', pad(12)] # drop the final asset dropw @@ -36,12 +39,13 @@ end #! the contents of the `PAD` elements shown below. It is the caller's responsibility to make sure #! these elements do not contain any meaningful data. #! -#! Inputs: [ASSET, note_idx, pad(11)] -#! Outputs: [ASSET, note_idx, pad(11)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] +#! Outputs: [pad(16)] #! #! Where: #! - note_idx is the index of the output note. -#! 
- ASSET is the fungible or non-fungible asset of interest. +#! - ASSET_KEY is the vault key of the asset to move to the note. +#! - ASSET_VALUE is the value of the asset to move to the note. #! #! Panics if: #! - the fungible asset is not found in the vault. @@ -50,71 +54,80 @@ end #! #! Invocation: call pub proc move_asset_to_note + dupw.1 dupw.1 + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + # remove the asset from the account exec.native_account::remove_asset - # => [ASSET, note_idx, pad(11)] - - dupw dup.8 movdn.4 - # => [ASSET, note_idx, ASSET, note_idx, pad(11)] + dropw + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] exec.output_note::add_asset - # => [ASSET, note_idx, pad(11)] + # => [pad(16)] end #! Adds all assets from the active note to the native account's vault. #! #! Inputs: [] #! Outputs: [] -@locals(1024) +@locals(2048) pub proc add_assets_to_account # write assets to local memory starting at offset 0 - # we have allocated 4 * MAX_ASSETS_PER_NOTE number of locals so all assets should fit + # we have allocated ASSET_SIZE * MAX_ASSETS_PER_NOTE number of locals so all assets should fit # since the asset memory will be overwritten, we don't have to initialize the locals to zero locaddr.0 exec.active_note::get_assets # => [num_of_assets, ptr = 0] # compute the pointer at which we should stop iterating - mul.4 dup.1 add + mul.ASSET_SIZE dup.1 add # => [end_ptr, ptr] # pad the stack and move the pointer to the top - padw movup.5 - # => [ptr, EMPTY_WORD, end_ptr] + padw padw movup.9 + # => [ptr, pad(8), end_ptr] # loop if the amount of assets is non-zero - dup dup.6 neq - # => [should_loop, ptr, EMPTY_WORD, end_ptr] + dup dup.10 neq + # => [should_loop, ptr, pad(8), end_ptr] while.true - # => [ptr, EMPTY_WORD, end_ptr] + # => [ptr, pad(8), end_ptr] # save the pointer so that we can use it later - dup movdn.5 - # => [ptr, EMPTY_WORD, ptr, end_ptr] + dup movdn.9 + # => [ptr, pad(8), ptr, end_ptr] + + # load the asset value + 
add.ASSET_VALUE_MEMORY_OFFSET mem_loadw_le swapw + # => [EMPTY_WORD, ASSET_VALUE, ptr, end_ptr] - # load the asset - mem_loadw_be - # => [ASSET, ptr, end_ptr] + # load the asset key + dup.8 mem_loadw_le + # => [ASSET_KEY, ASSET_VALUE, ptr, end_ptr] # pad the stack before call - padw swapw padw padw swapdw - # => [ASSET, pad(12), ptr, end_ptr] + padw padw swapdw + # => [ASSET_KEY, ASSET_VALUE, pad(8), ptr, end_ptr] # add asset to the account call.receive_asset # => [pad(16), ptr, end_ptr] # clean the stack after call - dropw dropw dropw - # => [EMPTY_WORD, ptr, end_ptr] + dropw dropw + # => [pad(8), ptr, end_ptr] + + # increment the pointer + movup.8 add.ASSET_SIZE dup + # => [ptr+ASSET_SIZE, ptr+ASSET_SIZE, pad(8), end_ptr] - # increment the pointer and continue looping if ptr != end_ptr - movup.4 add.4 dup dup.6 neq - # => [should_loop, ptr+4, EMPTY_WORD, end_ptr] + # continue looping if ptr != end_ptr + dup.10 neq + # => [should_loop, ptr+ASSET_SIZE, pad(8), end_ptr] end - # => [ptr', EMPTY_WORD, end_ptr] + # => [ptr', pad(8), end_ptr] # clear the stack - drop dropw drop + drop dropw dropw drop # => [] end diff --git a/crates/miden-standards/build.rs b/crates/miden-standards/build.rs index dc70d5a06e..d41e453948 100644 --- a/crates/miden-standards/build.rs +++ b/crates/miden-standards/build.rs @@ -9,19 +9,15 @@ use miden_protocol::transaction::TransactionKernel; // CONSTANTS // ================================================================================================ -/// Defines whether the build script should generate files in `/src`. -/// The docs.rs build pipeline has a read-only filesystem, so we have to avoid writing to `src`, -/// otherwise the docs will fail to build there. Note that writing to `OUT_DIR` is fine. 
-const BUILD_GENERATED_FILES_IN_SRC: bool = option_env!("BUILD_GENERATED_FILES_IN_SRC").is_some(); - const ASSETS_DIR: &str = "assets"; const ASM_DIR: &str = "asm"; const ASM_STANDARDS_DIR: &str = "standards"; const ASM_ACCOUNT_COMPONENTS_DIR: &str = "account_components"; const STANDARDS_LIB_NAMESPACE: &str = "miden::standards"; +const ACCOUNT_COMPONENTS_LIB_NAMESPACE: &str = "miden::standards::components"; -const STANDARDS_ERRORS_FILE: &str = "src/errors/standards.rs"; +const STANDARDS_ERRORS_RS_FILE: &str = "standards_errors.rs"; const STANDARDS_ERRORS_ARRAY_NAME: &str = "STANDARDS_ERRORS"; // PRE-PROCESSING @@ -34,7 +30,6 @@ const STANDARDS_ERRORS_ARRAY_NAME: &str = "STANDARDS_ERRORS"; fn main() -> Result<()> { // re-build when the MASM code changes println!("cargo::rerun-if-changed={ASM_DIR}/"); - println!("cargo::rerun-if-env-changed=BUILD_GENERATED_FILES_IN_SRC"); // Copies the MASM code to the build directory let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); @@ -49,11 +44,10 @@ fn main() -> Result<()> { // set target directory to {OUT_DIR}/assets let target_dir = Path::new(&build_dir).join(ASSETS_DIR); + let mut assembler = TransactionKernel::assembler().with_warnings_as_errors(true); // compile standards library (includes note scripts) - let standards_lib = - compile_standards_lib(&source_dir, &target_dir, TransactionKernel::assembler())?; + let standards_lib = compile_standards_lib(&source_dir, &target_dir, assembler.clone())?; - let mut assembler = TransactionKernel::assembler(); assembler.link_static_library(standards_lib)?; // compile account components @@ -63,7 +57,7 @@ fn main() -> Result<()> { assembler, )?; - generate_error_constants(&source_dir)?; + generate_error_constants(&source_dir, &build_dir)?; Ok(()) } @@ -114,7 +108,19 @@ fn compile_account_components( let component_source_code = fs::read_to_string(&masm_file_path) .expect("reading the component's MASM source code should succeed"); - let named_source = 
NamedSource::new(component_name.clone(), component_source_code); + // Build full library path from directory structure: + // e.g. faucets/basic_fungible_faucet.masm -> + // miden::standards::components::faucets::basic_fungible_faucet + let relative_path = masm_file_path + .strip_prefix(source_dir) + .expect("masm file should be inside source dir"); + let mut library_path = ACCOUNT_COMPONENTS_LIB_NAMESPACE.to_owned(); + for component in relative_path.with_extension("").components() { + let part = component.as_os_str().to_str().expect("valid UTF-8"); + library_path.push_str("::"); + library_path.push_str(part); + } + let named_source = NamedSource::new(library_path, component_source_code); let component_library = assembler .clone() @@ -165,14 +171,9 @@ fn compile_account_components( /// The function ensures that a constant is not defined twice, except if their error message is the /// same. This can happen across multiple files. /// -/// Because the error files will be written to ./src/errors, this should be a no-op if ./src is -/// read-only. To enable writing to ./src, set the `BUILD_GENERATED_FILES_IN_SRC` environment -/// variable. -fn generate_error_constants(asm_source_dir: &Path) -> Result<()> { - if !BUILD_GENERATED_FILES_IN_SRC { - return Ok(()); - } - +/// The generated file is written to `build_dir` (i.e. `OUT_DIR`) and included via `include!` +/// in the source. 
+fn generate_error_constants(asm_source_dir: &Path, build_dir: &str) -> Result<()> { // Miden standards errors // ------------------------------------------ @@ -180,7 +181,7 @@ fn generate_error_constants(asm_source_dir: &Path) -> Result<()> { .context("failed to extract all masm errors")?; shared::generate_error_file( shared::ErrorModule { - file_name: STANDARDS_ERRORS_FILE, + file_path: Path::new(build_dir).join(STANDARDS_ERRORS_RS_FILE), array_name: STANDARDS_ERRORS_ARRAY_NAME, is_crate_local: false, }, @@ -380,7 +381,7 @@ mod shared { } /// Generates the content of an error file for the given category and the set of errors and - /// writes it to the category's file. + /// writes it to the file at the path specified in the module. pub fn generate_error_file(module: ErrorModule, errors: Vec) -> Result<()> { let mut output = String::new(); @@ -427,26 +428,11 @@ mod shared { .into_diagnostic()?; } - write_if_changed(module.file_name, output)?; + fs::write(module.file_path, output).into_diagnostic()?; Ok(()) } - /// Writes `contents` to `path` only if the file doesn't exist or its current contents - /// differ. This avoids updating the file's mtime when nothing changed, which prevents - /// cargo from treating the crate as dirty on the next build. 
- pub fn write_if_changed(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result<()> { - let path = path.as_ref(); - let new_contents = contents.as_ref(); - if path.exists() { - let existing = std::fs::read(path).into_diagnostic()?; - if existing == new_contents { - return Ok(()); - } - } - std::fs::write(path, new_contents).into_diagnostic() - } - pub type ErrorName = String; #[derive(Debug, Clone)] @@ -460,9 +446,9 @@ mod shared { pub message: String, } - #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] + #[derive(Debug, Clone)] pub struct ErrorModule { - pub file_name: &'static str, + pub file_path: PathBuf, pub array_name: &'static str, pub is_crate_local: bool, } diff --git a/crates/miden-standards/src/account/access/mod.rs b/crates/miden-standards/src/account/access/mod.rs new file mode 100644 index 0000000000..f7c58c875b --- /dev/null +++ b/crates/miden-standards/src/account/access/mod.rs @@ -0,0 +1,20 @@ +use miden_protocol::account::{AccountComponent, AccountId}; + +pub mod ownable2step; + +/// Access control configuration for account components. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AccessControl { + /// Uses two-step ownership transfer with the provided initial owner. 
+ Ownable2Step { owner: AccountId }, +} + +impl From for AccountComponent { + fn from(access_control: AccessControl) -> Self { + match access_control { + AccessControl::Ownable2Step { owner } => Ownable2Step::new(owner).into(), + } + } +} + +pub use ownable2step::{Ownable2Step, Ownable2StepError}; diff --git a/crates/miden-standards/src/account/access/ownable2step.rs b/crates/miden-standards/src/account/access/ownable2step.rs new file mode 100644 index 0000000000..c5356394ab --- /dev/null +++ b/crates/miden-standards/src/account/access/ownable2step.rs @@ -0,0 +1,188 @@ +use miden_protocol::account::component::{ + AccountComponentMetadata, + FeltSchema, + StorageSchema, + StorageSlotSchema, +}; +use miden_protocol::account::{ + AccountComponent, + AccountId, + AccountStorage, + AccountType, + StorageSlot, + StorageSlotName, +}; +use miden_protocol::errors::AccountIdError; +use miden_protocol::utils::sync::LazyLock; +use miden_protocol::{Felt, Word}; + +use crate::account::components::ownable2step_library; + +static OWNER_CONFIG_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::access::ownable2step::owner_config") + .expect("storage slot name should be valid") +}); + +/// Two-step ownership management for account components. +/// +/// This struct holds the current owner and any nominated (pending) owner. A nominated owner +/// must explicitly accept the transfer before it takes effect, preventing accidental transfers +/// to incorrect addresses. +/// +/// ## Storage Layout +/// +/// The ownership data is stored in a single word: +/// +/// ```text +/// Word: [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] +/// word[0] word[1] word[2] word[3] +/// ``` +pub struct Ownable2Step { + /// The current owner of the component. `None` when ownership has been renounced. + owner: Option, + nominated_owner: Option, +} + +impl Ownable2Step { + /// The name of the component. 
+ pub const NAME: &'static str = "miden::standards::components::access::ownable2step"; + + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a new [`Ownable2Step`] with the given owner and no nominated owner. + pub fn new(owner: AccountId) -> Self { + Self { + owner: Some(owner), + nominated_owner: None, + } + } + + /// Reads ownership data from account storage, validating any non-zero account IDs. + /// + /// Returns an error if either owner or nominated owner contains an invalid (but non-zero) + /// account ID. + pub fn try_from_storage(storage: &AccountStorage) -> Result { + let word: Word = storage + .get_item(Self::slot_name()) + .map_err(Ownable2StepError::StorageLookupFailed)?; + + Self::try_from_word(word) + } + + /// Reconstructs an [`Ownable2Step`] from a raw storage word. + /// + /// Format: `[owner_suffix, owner_prefix, nominated_suffix, nominated_prefix]` + pub fn try_from_word(word: Word) -> Result { + let owner = account_id_from_felt_pair(word[0], word[1]) + .map_err(Ownable2StepError::InvalidOwnerId)?; + + let nominated_owner = account_id_from_felt_pair(word[2], word[3]) + .map_err(Ownable2StepError::InvalidNominatedOwnerId)?; + + Ok(Self { owner, nominated_owner }) + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the [`StorageSlotName`] where ownership data is stored. + pub fn slot_name() -> &'static StorageSlotName { + &OWNER_CONFIG_SLOT_NAME + } + + /// Returns the storage slot schema for the ownership configuration slot. 
+ pub fn slot_schema() -> (StorageSlotName, StorageSlotSchema) { + ( + Self::slot_name().clone(), + StorageSlotSchema::value( + "Ownership data (owner and nominated owner)", + [ + FeltSchema::felt("owner_suffix"), + FeltSchema::felt("owner_prefix"), + FeltSchema::felt("nominated_suffix"), + FeltSchema::felt("nominated_prefix"), + ], + ), + ) + } + + /// Returns the current owner, or `None` if ownership has been renounced. + pub fn owner(&self) -> Option { + self.owner + } + + /// Returns the nominated owner, or `None` if no transfer is in progress. + pub fn nominated_owner(&self) -> Option { + self.nominated_owner + } + + /// Converts this ownership data into a [`StorageSlot`]. + pub fn to_storage_slot(&self) -> StorageSlot { + StorageSlot::with_value(Self::slot_name().clone(), self.to_word()) + } + + /// Converts this ownership data into a raw [`Word`]. + pub fn to_word(&self) -> Word { + let (owner_suffix, owner_prefix) = match self.owner { + Some(id) => (id.suffix(), id.prefix().as_felt()), + None => (Felt::ZERO, Felt::ZERO), + }; + let (nominated_suffix, nominated_prefix) = match self.nominated_owner { + Some(id) => (id.suffix(), id.prefix().as_felt()), + None => (Felt::ZERO, Felt::ZERO), + }; + [owner_suffix, owner_prefix, nominated_suffix, nominated_prefix].into() + } + + /// Returns the [`AccountComponentMetadata`] for this component. 
+ pub fn component_metadata() -> AccountComponentMetadata { + let storage_schema = + StorageSchema::new([Self::slot_schema()]).expect("storage schema should be valid"); + + AccountComponentMetadata::new(Self::NAME, AccountType::all()) + .with_description("Two-step ownership management component") + .with_storage_schema(storage_schema) + } +} + +impl From for AccountComponent { + fn from(ownership: Ownable2Step) -> Self { + let storage_slot = ownership.to_storage_slot(); + let metadata = Ownable2Step::component_metadata(); + + AccountComponent::new(ownable2step_library(), vec![storage_slot], metadata).expect( + "Ownable2Step component should satisfy the requirements of a valid account component", + ) + } +} + +// OWNABLE2STEP ERROR +// ================================================================================================ + +/// Errors that can occur when reading [`Ownable2Step`] data from storage. +#[derive(Debug, thiserror::Error)] +pub enum Ownable2StepError { + #[error("failed to read ownership slot from storage")] + StorageLookupFailed(#[source] miden_protocol::errors::AccountError), + #[error("invalid owner account ID in storage")] + InvalidOwnerId(#[source] AccountIdError), + #[error("invalid nominated owner account ID in storage")] + InvalidNominatedOwnerId(#[source] AccountIdError), +} + +// HELPERS +// ================================================================================================ + +/// Constructs an `Option` from a suffix/prefix felt pair. +/// Returns `Ok(None)` when both felts are zero (renounced / no nomination). 
+fn account_id_from_felt_pair( + suffix: Felt, + prefix: Felt, +) -> Result, AccountIdError> { + if suffix == Felt::ZERO && prefix == Felt::ZERO { + Ok(None) + } else { + AccountId::try_from_elements(suffix, prefix).map(Some) + } +} diff --git a/crates/miden-standards/src/account/auth/mod.rs b/crates/miden-standards/src/account/auth/mod.rs index c1c3a9791c..e999fab153 100644 --- a/crates/miden-standards/src/account/auth/mod.rs +++ b/crates/miden-standards/src/account/auth/mod.rs @@ -9,3 +9,6 @@ pub use singlesig_acl::{AuthSingleSigAcl, AuthSingleSigAclConfig}; mod multisig; pub use multisig::{AuthMultisig, AuthMultisigConfig}; + +mod multisig_psm; +pub use multisig_psm::{AuthMultisigPsm, AuthMultisigPsmConfig, PsmConfig}; diff --git a/crates/miden-standards/src/account/auth/multisig.rs b/crates/miden-standards/src/account/auth/multisig.rs index 86d6ebce8b..196bb3de0c 100644 --- a/crates/miden-standards/src/account/auth/multisig.rs +++ b/crates/miden-standards/src/account/auth/multisig.rs @@ -6,16 +6,26 @@ use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; use miden_protocol::account::component::{ AccountComponentMetadata, FeltSchema, - SchemaTypeId, + SchemaType, StorageSchema, StorageSlotSchema, }; -use miden_protocol::account::{AccountComponent, StorageMap, StorageSlot, StorageSlotName}; +use miden_protocol::account::{ + AccountComponent, + AccountType, + StorageMap, + StorageMapKey, + StorageSlot, + StorageSlotName, +}; use miden_protocol::errors::AccountError; use miden_protocol::utils::sync::LazyLock; use crate::account::components::multisig_library; +// CONSTANTS +// ================================================================================================ + static THRESHOLD_CONFIG_SLOT_NAME: LazyLock = LazyLock::new(|| { StorageSlotName::new("miden::standards::auth::multisig::threshold_config") .expect("storage slot name should be valid") @@ -116,19 +126,13 @@ impl AuthMultisigConfig { } } -/// An [`AccountComponent`] implementing a 
multisig based on ECDSA signatures. +/// An [`AccountComponent`] implementing a multisig authentication. /// /// It enforces a threshold of approver signatures for every transaction, with optional -/// per-procedure thresholds overrides. Non-uniform thresholds (especially a threshold of one) -/// should be used with caution for private multisig accounts, as a single approver could withhold -/// the new state from other approvers, effectively locking them out. -/// -/// The storage layout is: -/// - Slot 0(value): [threshold, num_approvers, 0, 0] -/// - Slot 1(map): A map with approver public keys (index -> pubkey) -/// - Slot 2(map): A map with approver scheme ids (index -> scheme_id) -/// - Slot 3(map): A map which stores executed transactions -/// - Slot 4(map): A map which stores procedure thresholds (PROC_ROOT -> threshold) +/// per-procedure threshold overrides. Non-uniform thresholds (especially a threshold of one) +/// should be used with caution for private multisig accounts, without Private State Manager (PSM), +/// a single approver may advance state and withhold updates from other approvers, effectively +/// locking them out. /// /// This component supports all account types. #[derive(Debug)] @@ -138,7 +142,7 @@ pub struct AuthMultisig { impl AuthMultisig { /// The name of the component. - pub const NAME: &'static str = "miden::auth::multisig"; + pub const NAME: &'static str = "miden::standards::components::auth::multisig"; /// Creates a new [`AuthMultisig`] component from the provided configuration. 
pub fn new(config: AuthMultisigConfig) -> Result { @@ -192,8 +196,8 @@ impl AuthMultisig { Self::approver_public_keys_slot().clone(), StorageSlotSchema::map( "Approver public keys", - SchemaTypeId::u32(), - SchemaTypeId::pub_key(), + SchemaType::u32(), + SchemaType::pub_key(), ), ) } @@ -204,8 +208,8 @@ impl AuthMultisig { Self::approver_scheme_ids_slot().clone(), StorageSlotSchema::map( "Approver scheme IDs", - SchemaTypeId::u32(), - SchemaTypeId::auth_scheme(), + SchemaType::u32(), + SchemaType::auth_scheme(), ), ) } @@ -216,8 +220,8 @@ impl AuthMultisig { Self::executed_transactions_slot().clone(), StorageSlotSchema::map( "Executed transactions", - SchemaTypeId::native_word(), - SchemaTypeId::native_word(), + SchemaType::native_word(), + SchemaType::native_word(), ), ) } @@ -228,11 +232,27 @@ impl AuthMultisig { Self::procedure_thresholds_slot().clone(), StorageSlotSchema::map( "Procedure thresholds", - SchemaTypeId::native_word(), - SchemaTypeId::u32(), + SchemaType::native_word(), + SchemaType::u32(), ), ) } + + /// Returns the [`AccountComponentMetadata`] for this component. 
+ pub fn component_metadata() -> AccountComponentMetadata { + let storage_schema = StorageSchema::new([ + Self::threshold_config_slot_schema(), + Self::approver_public_keys_slot_schema(), + Self::approver_auth_scheme_slot_schema(), + Self::executed_transactions_slot_schema(), + Self::procedure_thresholds_slot_schema(), + ]) + .expect("storage schema should be valid"); + + AccountComponentMetadata::new(Self::NAME, AccountType::all()) + .with_description("Multisig authentication component using hybrid signature schemes") + .with_storage_schema(storage_schema) + } } impl From for AccountComponent { @@ -247,12 +267,10 @@ impl From for AccountComponent { )); // Approver public keys slot (map) - let map_entries = multisig - .config - .approvers() - .iter() - .enumerate() - .map(|(i, (pub_key, _))| (Word::from([i as u32, 0, 0, 0]), Word::from(*pub_key))); + let map_entries = + multisig.config.approvers().iter().enumerate().map(|(i, (pub_key, _))| { + (StorageMapKey::from_index(i as u32), Word::from(*pub_key)) + }); // Safe to unwrap because we know that the map keys are unique. 
storage_slots.push(StorageSlot::with_map( @@ -263,7 +281,7 @@ impl From for AccountComponent { // Approver scheme IDs slot (map): [index, 0, 0, 0] => [scheme_id, 0, 0, 0] let scheme_id_entries = multisig.config.approvers().iter().enumerate().map(|(i, (_, auth_scheme))| { - (Word::from([i as u32, 0, 0, 0]), Word::from([*auth_scheme as u32, 0, 0, 0])) + (StorageMapKey::from_index(i as u32), Word::from([*auth_scheme as u32, 0, 0, 0])) }); storage_slots.push(StorageSlot::with_map( @@ -280,11 +298,9 @@ impl From for AccountComponent { // Procedure thresholds slot (map: PROC_ROOT -> threshold) let proc_threshold_roots = StorageMap::with_entries( - multisig - .config - .proc_thresholds() - .iter() - .map(|(proc_root, threshold)| (*proc_root, Word::from([*threshold, 0, 0, 0]))), + multisig.config.proc_thresholds().iter().map(|(proc_root, threshold)| { + (StorageMapKey::from_raw(*proc_root), Word::from([*threshold, 0, 0, 0])) + }), ) .unwrap(); storage_slots.push(StorageSlot::with_map( @@ -292,19 +308,7 @@ impl From for AccountComponent { proc_threshold_roots, )); - let storage_schema = StorageSchema::new([ - AuthMultisig::threshold_config_slot_schema(), - AuthMultisig::approver_public_keys_slot_schema(), - AuthMultisig::approver_auth_scheme_slot_schema(), - AuthMultisig::executed_transactions_slot_schema(), - AuthMultisig::procedure_thresholds_slot_schema(), - ]) - .expect("storage schema should be valid"); - - let metadata = AccountComponentMetadata::new(AuthMultisig::NAME) - .with_description("Multisig authentication component using hybrid signature schemes") - .with_supports_all_types() - .with_storage_schema(storage_schema); + let metadata = AuthMultisig::component_metadata(); AccountComponent::new(multisig_library(), storage_slots, metadata).expect( "Multisig auth component should satisfy the requirements of a valid account component", @@ -312,6 +316,9 @@ impl From for AccountComponent { } } +// TESTS +// 
================================================================================================ + #[cfg(test)] mod tests { use alloc::string::ToString; @@ -327,9 +334,9 @@ mod tests { #[test] fn test_multisig_component_setup() { // Create test secret keys - let sec_key_1 = AuthSecretKey::new_falcon512_rpo(); - let sec_key_2 = AuthSecretKey::new_falcon512_rpo(); - let sec_key_3 = AuthSecretKey::new_falcon512_rpo(); + let sec_key_1 = AuthSecretKey::new_falcon512_poseidon2(); + let sec_key_2 = AuthSecretKey::new_falcon512_poseidon2(); + let sec_key_3 = AuthSecretKey::new_falcon512_poseidon2(); // Create approvers list for multisig config let approvers = vec![ diff --git a/crates/miden-standards/src/account/auth/multisig_psm.rs b/crates/miden-standards/src/account/auth/multisig_psm.rs new file mode 100644 index 0000000000..1e9ecc34b2 --- /dev/null +++ b/crates/miden-standards/src/account/auth/multisig_psm.rs @@ -0,0 +1,588 @@ +use alloc::vec::Vec; + +use miden_protocol::Word; +use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; +use miden_protocol::account::component::{ + AccountComponentMetadata, + SchemaType, + StorageSchema, + StorageSlotSchema, +}; +use miden_protocol::account::{ + AccountComponent, + AccountType, + StorageMap, + StorageMapKey, + StorageSlot, + StorageSlotName, +}; +use miden_protocol::errors::AccountError; +use miden_protocol::utils::sync::LazyLock; + +use super::multisig::{AuthMultisig, AuthMultisigConfig}; +use crate::account::components::multisig_psm_library; + +// CONSTANTS +// ================================================================================================ + +static PSM_PUBKEY_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::auth::psm::pub_key") + .expect("storage slot name should be valid") +}); + +static PSM_SCHEME_ID_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::auth::psm::scheme") + .expect("storage slot name should be valid") +}); + 
+// MULTISIG AUTHENTICATION COMPONENT +// ================================================================================================ + +/// Configuration for [`AuthMultisigPsm`] component. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct AuthMultisigPsmConfig { + multisig: AuthMultisigConfig, + psm_config: PsmConfig, +} + +/// Public configuration for the private state manager signer. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct PsmConfig { + pub_key: PublicKeyCommitment, + auth_scheme: AuthScheme, +} + +impl PsmConfig { + pub fn new(pub_key: PublicKeyCommitment, auth_scheme: AuthScheme) -> Self { + Self { pub_key, auth_scheme } + } + + pub fn pub_key(&self) -> PublicKeyCommitment { + self.pub_key + } + + pub fn auth_scheme(&self) -> AuthScheme { + self.auth_scheme + } + + fn public_key_slot() -> &'static StorageSlotName { + &PSM_PUBKEY_SLOT_NAME + } + + fn scheme_id_slot() -> &'static StorageSlotName { + &PSM_SCHEME_ID_SLOT_NAME + } + + fn public_key_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + ( + Self::public_key_slot().clone(), + StorageSlotSchema::map( + "Private state manager public keys", + SchemaType::u32(), + SchemaType::pub_key(), + ), + ) + } + + fn auth_scheme_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + ( + Self::scheme_id_slot().clone(), + StorageSlotSchema::map( + "Private state manager scheme IDs", + SchemaType::u32(), + SchemaType::auth_scheme(), + ), + ) + } + + fn into_component_parts(self) -> (Vec, Vec<(StorageSlotName, StorageSlotSchema)>) { + let mut storage_slots = Vec::with_capacity(2); + + // Private state manager public key slot (map: [0, 0, 0, 0] -> pubkey) + let psm_public_key_entries = + [(StorageMapKey::from_raw(Word::from([0u32, 0, 0, 0])), Word::from(self.pub_key))]; + storage_slots.push(StorageSlot::with_map( + Self::public_key_slot().clone(), + StorageMap::with_entries(psm_public_key_entries).unwrap(), + )); + + // Private state manager scheme IDs slot (map: [0, 0, 0, 0] -> 
[scheme_id, 0, 0, 0]) + let psm_scheme_id_entries = [( + StorageMapKey::from_raw(Word::from([0u32, 0, 0, 0])), + Word::from([self.auth_scheme as u32, 0, 0, 0]), + )]; + storage_slots.push(StorageSlot::with_map( + Self::scheme_id_slot().clone(), + StorageMap::with_entries(psm_scheme_id_entries).unwrap(), + )); + + let slot_metadata = vec![Self::public_key_slot_schema(), Self::auth_scheme_slot_schema()]; + + (storage_slots, slot_metadata) + } +} + +impl AuthMultisigPsmConfig { + /// Creates a new configuration with the given approvers, default threshold and PSM signer. + /// + /// The `default_threshold` must be at least 1 and at most the number of approvers. + /// The private state manager public key must be different from all approver public keys. + pub fn new( + approvers: Vec<(PublicKeyCommitment, AuthScheme)>, + default_threshold: u32, + psm_config: PsmConfig, + ) -> Result { + let multisig = AuthMultisigConfig::new(approvers, default_threshold)?; + if multisig + .approvers() + .iter() + .any(|(approver, _)| *approver == psm_config.pub_key()) + { + return Err(AccountError::other( + "private state manager public key must be different from approvers", + )); + } + + Ok(Self { multisig, psm_config }) + } + + /// Attaches a per-procedure threshold map. Each procedure threshold must be at least 1 and + /// at most the number of approvers. 
+ pub fn with_proc_thresholds( + mut self, + proc_thresholds: Vec<(Word, u32)>, + ) -> Result { + self.multisig = self.multisig.with_proc_thresholds(proc_thresholds)?; + Ok(self) + } + + pub fn approvers(&self) -> &[(PublicKeyCommitment, AuthScheme)] { + self.multisig.approvers() + } + + pub fn default_threshold(&self) -> u32 { + self.multisig.default_threshold() + } + + pub fn proc_thresholds(&self) -> &[(Word, u32)] { + self.multisig.proc_thresholds() + } + + pub fn psm_config(&self) -> PsmConfig { + self.psm_config + } + + fn into_parts(self) -> (AuthMultisigConfig, PsmConfig) { + (self.multisig, self.psm_config) + } +} + +/// An [`AccountComponent`] implementing a multisig authentication with a private state manager. +/// +/// It enforces a threshold of approver signatures for every transaction, with optional +/// per-procedure threshold overrides. With Private State Manager (PSM) is configured, +/// multisig authorization is combined with PSM authorization, so operations require both +/// multisig approval and a valid PSM signature. This substantially mitigates low-threshold +/// state-withholding scenarios since the PSM is expected to forward state updates to other +/// approvers. +/// +/// This component supports all account types. +#[derive(Debug)] +pub struct AuthMultisigPsm { + multisig: AuthMultisig, + psm_config: PsmConfig, +} + +impl AuthMultisigPsm { + /// The name of the component. + pub const NAME: &'static str = "miden::standards::components::auth::multisig_psm"; + + /// Creates a new [`AuthMultisigPsm`] component from the provided configuration. + pub fn new(config: AuthMultisigPsmConfig) -> Result { + let (multisig_config, psm_config) = config.into_parts(); + Ok(Self { + multisig: AuthMultisig::new(multisig_config)?, + psm_config, + }) + } + + /// Returns the [`StorageSlotName`] where the threshold configuration is stored. 
+ pub fn threshold_config_slot() -> &'static StorageSlotName { + AuthMultisig::threshold_config_slot() + } + + /// Returns the [`StorageSlotName`] where the approver public keys are stored. + pub fn approver_public_keys_slot() -> &'static StorageSlotName { + AuthMultisig::approver_public_keys_slot() + } + + // Returns the [`StorageSlotName`] where the approver scheme IDs are stored. + pub fn approver_scheme_ids_slot() -> &'static StorageSlotName { + AuthMultisig::approver_scheme_ids_slot() + } + + /// Returns the [`StorageSlotName`] where the executed transactions are stored. + pub fn executed_transactions_slot() -> &'static StorageSlotName { + AuthMultisig::executed_transactions_slot() + } + + /// Returns the [`StorageSlotName`] where the procedure thresholds are stored. + pub fn procedure_thresholds_slot() -> &'static StorageSlotName { + AuthMultisig::procedure_thresholds_slot() + } + + /// Returns the [`StorageSlotName`] where the private state manager public key is stored. + pub fn psm_public_key_slot() -> &'static StorageSlotName { + PsmConfig::public_key_slot() + } + + /// Returns the [`StorageSlotName`] where the private state manager scheme IDs are stored. + pub fn psm_scheme_id_slot() -> &'static StorageSlotName { + PsmConfig::scheme_id_slot() + } + + /// Returns the storage slot schema for the threshold configuration slot. + pub fn threshold_config_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + AuthMultisig::threshold_config_slot_schema() + } + + /// Returns the storage slot schema for the approver public keys slot. + pub fn approver_public_keys_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + AuthMultisig::approver_public_keys_slot_schema() + } + + // Returns the storage slot schema for the approver scheme IDs slot. + pub fn approver_auth_scheme_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + AuthMultisig::approver_auth_scheme_slot_schema() + } + + /// Returns the storage slot schema for the executed transactions slot. 
+ pub fn executed_transactions_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + AuthMultisig::executed_transactions_slot_schema() + } + + /// Returns the storage slot schema for the procedure thresholds slot. + pub fn procedure_thresholds_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + AuthMultisig::procedure_thresholds_slot_schema() + } + + /// Returns the storage slot schema for the private state manager public key slot. + pub fn psm_public_key_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + PsmConfig::public_key_slot_schema() + } + + /// Returns the storage slot schema for the private state manager scheme IDs slot. + pub fn psm_auth_scheme_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + PsmConfig::auth_scheme_slot_schema() + } + + /// Returns the [`AccountComponentMetadata`] for this component. + pub fn component_metadata() -> AccountComponentMetadata { + let storage_schema = StorageSchema::new([ + Self::threshold_config_slot_schema(), + Self::approver_public_keys_slot_schema(), + Self::approver_auth_scheme_slot_schema(), + Self::executed_transactions_slot_schema(), + Self::procedure_thresholds_slot_schema(), + Self::psm_public_key_slot_schema(), + Self::psm_auth_scheme_slot_schema(), + ]) + .expect("storage schema should be valid"); + + AccountComponentMetadata::new(Self::NAME, AccountType::all()) + .with_description( + "Multisig authentication component with private state manager \ + using hybrid signature schemes", + ) + .with_storage_schema(storage_schema) + } +} + +impl From for AccountComponent { + fn from(multisig: AuthMultisigPsm) -> Self { + let AuthMultisigPsm { multisig, psm_config } = multisig; + let multisig_component = AccountComponent::from(multisig); + let (psm_slots, psm_slot_metadata) = psm_config.into_component_parts(); + + let mut storage_slots = multisig_component.storage_slots().to_vec(); + storage_slots.extend(psm_slots); + + let mut slot_schemas: Vec<(StorageSlotName, StorageSlotSchema)> = 
multisig_component + .storage_schema() + .iter() + .map(|(slot_name, slot_schema)| (slot_name.clone(), slot_schema.clone())) + .collect(); + slot_schemas.extend(psm_slot_metadata); + + let storage_schema = + StorageSchema::new(slot_schemas).expect("storage schema should be valid"); + + let metadata = AccountComponentMetadata::new( + AuthMultisigPsm::NAME, + multisig_component.supported_types().clone(), + ) + .with_description(multisig_component.metadata().description()) + .with_version(multisig_component.metadata().version().clone()) + .with_storage_schema(storage_schema); + + AccountComponent::new(multisig_psm_library(), storage_slots, metadata).expect( + "Multisig auth component should satisfy the requirements of a valid account component", + ) + } +} + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use alloc::string::ToString; + + use miden_protocol::Word; + use miden_protocol::account::AccountBuilder; + use miden_protocol::account::auth::AuthSecretKey; + + use super::*; + use crate::account::wallets::BasicWallet; + + /// Test multisig component setup with various configurations + #[test] + fn test_multisig_component_setup() { + // Create test secret keys + let sec_key_1 = AuthSecretKey::new_falcon512_poseidon2(); + let sec_key_2 = AuthSecretKey::new_falcon512_poseidon2(); + let sec_key_3 = AuthSecretKey::new_falcon512_poseidon2(); + let psm_key = AuthSecretKey::new_ecdsa_k256_keccak(); + + // Create approvers list for multisig config + let approvers = vec![ + (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()), + (sec_key_2.public_key().to_commitment(), sec_key_2.auth_scheme()), + (sec_key_3.public_key().to_commitment(), sec_key_3.auth_scheme()), + ]; + + let threshold = 2u32; + + // Create multisig component + let multisig_component = AuthMultisigPsm::new( + AuthMultisigPsmConfig::new( + approvers.clone(), + threshold, + 
PsmConfig::new(psm_key.public_key().to_commitment(), psm_key.auth_scheme()), + ) + .expect("invalid multisig config"), + ) + .expect("multisig component creation failed"); + + // Build account with multisig component + let account = AccountBuilder::new([0; 32]) + .with_auth_component(multisig_component) + .with_component(BasicWallet) + .build() + .expect("account building failed"); + + // Verify config slot: [threshold, num_approvers, 0, 0] + let config_slot = account + .storage() + .get_item(AuthMultisigPsm::threshold_config_slot()) + .expect("config storage slot access failed"); + assert_eq!(config_slot, Word::from([threshold, approvers.len() as u32, 0, 0])); + + // Verify approver pub keys slot + for (i, (expected_pub_key, _)) in approvers.iter().enumerate() { + let stored_pub_key = account + .storage() + .get_map_item( + AuthMultisigPsm::approver_public_keys_slot(), + Word::from([i as u32, 0, 0, 0]), + ) + .expect("approver public key storage map access failed"); + assert_eq!(stored_pub_key, Word::from(*expected_pub_key)); + } + + // Verify approver scheme IDs slot + for (i, (_, expected_auth_scheme)) in approvers.iter().enumerate() { + let stored_scheme_id = account + .storage() + .get_map_item( + AuthMultisigPsm::approver_scheme_ids_slot(), + Word::from([i as u32, 0, 0, 0]), + ) + .expect("approver scheme ID storage map access failed"); + assert_eq!(stored_scheme_id, Word::from([*expected_auth_scheme as u32, 0, 0, 0])); + } + + // Verify private state manager signer is configured. 
+ let psm_public_key = account + .storage() + .get_map_item(AuthMultisigPsm::psm_public_key_slot(), Word::from([0u32, 0, 0, 0])) + .expect("private state manager public key storage map access failed"); + assert_eq!(psm_public_key, Word::from(psm_key.public_key().to_commitment())); + + let psm_scheme_id = account + .storage() + .get_map_item(AuthMultisigPsm::psm_scheme_id_slot(), Word::from([0u32, 0, 0, 0])) + .expect("private state manager scheme ID storage map access failed"); + assert_eq!(psm_scheme_id, Word::from([psm_key.auth_scheme() as u32, 0, 0, 0])); + } + + /// Test multisig component with minimum threshold (1 of 1) + #[test] + fn test_multisig_component_minimum_threshold() { + let pub_key = AuthSecretKey::new_ecdsa_k256_keccak().public_key().to_commitment(); + let psm_key = AuthSecretKey::new_falcon512_poseidon2(); + let approvers = vec![(pub_key, AuthScheme::EcdsaK256Keccak)]; + let threshold = 1u32; + + let multisig_component = AuthMultisigPsm::new( + AuthMultisigPsmConfig::new( + approvers.clone(), + threshold, + PsmConfig::new(psm_key.public_key().to_commitment(), psm_key.auth_scheme()), + ) + .expect("invalid multisig config"), + ) + .expect("multisig component creation failed"); + + let account = AccountBuilder::new([0; 32]) + .with_auth_component(multisig_component) + .with_component(BasicWallet) + .build() + .expect("account building failed"); + + // Verify storage layout + let config_slot = account + .storage() + .get_item(AuthMultisigPsm::threshold_config_slot()) + .expect("config storage slot access failed"); + assert_eq!(config_slot, Word::from([threshold, approvers.len() as u32, 0, 0])); + + let stored_pub_key = account + .storage() + .get_map_item(AuthMultisigPsm::approver_public_keys_slot(), Word::from([0u32, 0, 0, 0])) + .expect("approver pub keys storage map access failed"); + assert_eq!(stored_pub_key, Word::from(pub_key)); + + let stored_scheme_id = account + .storage() + .get_map_item(AuthMultisigPsm::approver_scheme_ids_slot(), 
Word::from([0u32, 0, 0, 0])) + .expect("approver scheme IDs storage map access failed"); + assert_eq!(stored_scheme_id, Word::from([AuthScheme::EcdsaK256Keccak as u32, 0, 0, 0])); + } + + /// Test multisig component setup with a private state manager. + #[test] + fn test_multisig_component_with_psm() { + let sec_key_1 = AuthSecretKey::new_falcon512_poseidon2(); + let sec_key_2 = AuthSecretKey::new_falcon512_poseidon2(); + let psm_key = AuthSecretKey::new_ecdsa_k256_keccak(); + + let approvers = vec![ + (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()), + (sec_key_2.public_key().to_commitment(), sec_key_2.auth_scheme()), + ]; + + let multisig_component = AuthMultisigPsm::new( + AuthMultisigPsmConfig::new( + approvers, + 2, + PsmConfig::new(psm_key.public_key().to_commitment(), psm_key.auth_scheme()), + ) + .expect("invalid multisig config"), + ) + .expect("multisig component creation failed"); + + let account = AccountBuilder::new([0; 32]) + .with_auth_component(multisig_component) + .with_component(BasicWallet) + .build() + .expect("account building failed"); + + let psm_public_key = account + .storage() + .get_map_item(AuthMultisigPsm::psm_public_key_slot(), Word::from([0u32, 0, 0, 0])) + .expect("private state manager public key storage map access failed"); + assert_eq!(psm_public_key, Word::from(psm_key.public_key().to_commitment())); + + let psm_scheme_id = account + .storage() + .get_map_item(AuthMultisigPsm::psm_scheme_id_slot(), Word::from([0u32, 0, 0, 0])) + .expect("private state manager scheme ID storage map access failed"); + assert_eq!(psm_scheme_id, Word::from([psm_key.auth_scheme() as u32, 0, 0, 0])); + } + + /// Test multisig component error cases + #[test] + fn test_multisig_component_error_cases() { + let pub_key = AuthSecretKey::new_ecdsa_k256_keccak().public_key().to_commitment(); + let psm_key = AuthSecretKey::new_falcon512_poseidon2(); + let approvers = vec![(pub_key, AuthScheme::EcdsaK256Keccak)]; + + // Test threshold > number 
of approvers (should fail) + let result = AuthMultisigPsmConfig::new( + approvers, + 2, + PsmConfig::new(psm_key.public_key().to_commitment(), psm_key.auth_scheme()), + ); + + assert!( + result + .unwrap_err() + .to_string() + .contains("threshold cannot be greater than number of approvers") + ); + } + + /// Test multisig component with duplicate approvers (should fail) + #[test] + fn test_multisig_component_duplicate_approvers() { + // Create secret keys for approvers + let sec_key_1 = AuthSecretKey::new_ecdsa_k256_keccak(); + let sec_key_2 = AuthSecretKey::new_ecdsa_k256_keccak(); + let psm_key = AuthSecretKey::new_falcon512_poseidon2(); + + // Create approvers list with duplicate public keys + let approvers = vec![ + (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()), + (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()), + (sec_key_2.public_key().to_commitment(), sec_key_2.auth_scheme()), + ]; + + let result = AuthMultisigPsmConfig::new( + approvers, + 2, + PsmConfig::new(psm_key.public_key().to_commitment(), psm_key.auth_scheme()), + ); + assert!( + result + .unwrap_err() + .to_string() + .contains("duplicate approver public keys are not allowed") + ); + } + + /// Test multisig component rejects a private state manager key which is already an approver. 
+ #[test] + fn test_multisig_component_psm_not_approver() { + let sec_key_1 = AuthSecretKey::new_ecdsa_k256_keccak(); + let sec_key_2 = AuthSecretKey::new_ecdsa_k256_keccak(); + + let approvers = vec![ + (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()), + (sec_key_2.public_key().to_commitment(), sec_key_2.auth_scheme()), + ]; + + let result = AuthMultisigPsmConfig::new( + approvers, + 2, + PsmConfig::new(sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()), + ); + + assert!( + result + .unwrap_err() + .to_string() + .contains("private state manager public key must be different from approvers") + ); + } +} diff --git a/crates/miden-standards/src/account/auth/no_auth.rs b/crates/miden-standards/src/account/auth/no_auth.rs index 51da556554..6fa7ab6911 100644 --- a/crates/miden-standards/src/account/auth/no_auth.rs +++ b/crates/miden-standards/src/account/auth/no_auth.rs @@ -1,5 +1,5 @@ -use miden_protocol::account::AccountComponent; use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{AccountComponent, AccountType}; use crate::account::components::no_auth_library; @@ -21,12 +21,18 @@ pub struct NoAuth; impl NoAuth { /// The name of the component. - pub const NAME: &'static str = "miden::auth::no_auth"; + pub const NAME: &'static str = "miden::standards::components::auth::no_auth"; /// Creates a new [`NoAuth`] component. pub fn new() -> Self { Self } + + /// Returns the [`AccountComponentMetadata`] for this component. 
+ pub fn component_metadata() -> AccountComponentMetadata { + AccountComponentMetadata::new(Self::NAME, AccountType::all()) + .with_description("No authentication component") + } } impl Default for NoAuth { @@ -37,15 +43,16 @@ impl Default for NoAuth { impl From for AccountComponent { fn from(_: NoAuth) -> Self { - let metadata = AccountComponentMetadata::new(NoAuth::NAME) - .with_description("No authentication component") - .with_supports_all_types(); + let metadata = NoAuth::component_metadata(); AccountComponent::new(no_auth_library(), vec![], metadata) .expect("NoAuth component should satisfy the requirements of a valid account component") } } +// TESTS +// ================================================================================================ + #[cfg(test)] mod tests { use miden_protocol::account::AccountBuilder; diff --git a/crates/miden-standards/src/account/auth/singlesig.rs b/crates/miden-standards/src/account/auth/singlesig.rs index 9fca5f2862..ee1e8401ef 100644 --- a/crates/miden-standards/src/account/auth/singlesig.rs +++ b/crates/miden-standards/src/account/auth/singlesig.rs @@ -2,15 +2,18 @@ use miden_protocol::Word; use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; use miden_protocol::account::component::{ AccountComponentMetadata, - SchemaTypeId, + SchemaType, StorageSchema, StorageSlotSchema, }; -use miden_protocol::account::{AccountComponent, StorageSlot, StorageSlotName}; +use miden_protocol::account::{AccountComponent, AccountType, StorageSlot, StorageSlotName}; use miden_protocol::utils::sync::LazyLock; use crate::account::components::singlesig_library; +// CONSTANTS +// ================================================================================================ + static PUBKEY_SLOT_NAME: LazyLock = LazyLock::new(|| { StorageSlotName::new("miden::standards::auth::singlesig::pub_key") .expect("storage slot name should be valid") @@ -42,7 +45,7 @@ pub struct AuthSingleSig { impl AuthSingleSig { /// The name of the 
component. - pub const NAME: &'static str = "miden::auth::singlesig"; + pub const NAME: &'static str = "miden::standards::components::auth::singlesig"; /// Creates a new [`AuthSingleSig`] component with the given `public_key`. pub fn new(pub_key: PublicKeyCommitment, auth_scheme: AuthScheme) -> Self { @@ -63,30 +66,36 @@ impl AuthSingleSig { pub fn public_key_slot_schema() -> (StorageSlotName, StorageSlotSchema) { ( Self::public_key_slot().clone(), - StorageSlotSchema::value("Public key commitment", SchemaTypeId::pub_key()), + StorageSlotSchema::value("Public key commitment", SchemaType::pub_key()), ) } /// Returns the storage slot schema for the scheme ID slot. pub fn auth_scheme_slot_schema() -> (StorageSlotName, StorageSlotSchema) { ( Self::scheme_id_slot().clone(), - StorageSlotSchema::value("Scheme ID", SchemaTypeId::auth_scheme()), + StorageSlotSchema::value("Scheme ID", SchemaType::auth_scheme()), ) } -} -impl From for AccountComponent { - fn from(basic_signature: AuthSingleSig) -> Self { + /// Returns the [`AccountComponentMetadata`] for this component. 
+ pub fn component_metadata() -> AccountComponentMetadata { let storage_schema = StorageSchema::new(vec![ - AuthSingleSig::public_key_slot_schema(), - AuthSingleSig::auth_scheme_slot_schema(), + Self::public_key_slot_schema(), + Self::auth_scheme_slot_schema(), ]) .expect("storage schema should be valid"); - let metadata = AccountComponentMetadata::new(AuthSingleSig::NAME) - .with_description("Authentication component using ECDSA K256 Keccak or Rpo Falcon 512 signature scheme") - .with_supports_all_types() - .with_storage_schema(storage_schema); + AccountComponentMetadata::new(Self::NAME, AccountType::all()) + .with_description( + "Authentication component using ECDSA K256 Keccak or Falcon512 Poseidon2 signature scheme", + ) + .with_storage_schema(storage_schema) + } +} + +impl From for AccountComponent { + fn from(basic_signature: AuthSingleSig) -> Self { + let metadata = AuthSingleSig::component_metadata(); let storage_slots = vec![ StorageSlot::with_value( diff --git a/crates/miden-standards/src/account/auth/singlesig_acl.rs b/crates/miden-standards/src/account/auth/singlesig_acl.rs index 9a25fef5d7..70ff9a1b1d 100644 --- a/crates/miden-standards/src/account/auth/singlesig_acl.rs +++ b/crates/miden-standards/src/account/auth/singlesig_acl.rs @@ -4,14 +4,16 @@ use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; use miden_protocol::account::component::{ AccountComponentMetadata, FeltSchema, - SchemaTypeId, + SchemaType, StorageSchema, StorageSlotSchema, }; use miden_protocol::account::{ AccountCode, AccountComponent, + AccountType, StorageMap, + StorageMapKey, StorageSlot, StorageSlotName, }; @@ -21,6 +23,9 @@ use miden_protocol::{Felt, Word}; use crate::account::components::singlesig_acl_library; +// CONSTANTS +// ================================================================================================ + static PUBKEY_SLOT_NAME: LazyLock = LazyLock::new(|| { StorageSlotName::new("miden::standards::auth::singlesig_acl::pub_key") 
.expect("storage slot name should be valid") @@ -91,7 +96,7 @@ impl Default for AuthSingleSigAclConfig { } /// An [`AccountComponent`] implementing a procedure-based Access Control List (ACL) using either -/// the EcdsaK256Keccak or Rpo Falcon 512 signature scheme for authentication of transactions. +/// the EcdsaK256Keccak or Falcon512 Poseidon2 signature scheme for authentication of transactions. /// /// This component provides fine-grained authentication control based on three conditions: /// 1. **Procedure-based authentication**: Requires authentication when any of the specified trigger @@ -152,7 +157,7 @@ pub struct AuthSingleSigAcl { impl AuthSingleSigAcl { /// The name of the component. - pub const NAME: &'static str = "miden::auth::singlesig_acl"; + pub const NAME: &'static str = "miden::standards::components::auth::singlesig_acl"; /// Creates a new [`AuthSingleSigAcl`] component with the given `public_key` and /// configuration. /// @@ -197,7 +202,7 @@ impl AuthSingleSigAcl { pub fn public_key_slot_schema() -> (StorageSlotName, StorageSlotSchema) { ( Self::public_key_slot().clone(), - StorageSlotSchema::value("Public key commitment", SchemaTypeId::pub_key()), + StorageSlotSchema::value("Public key commitment", SchemaType::pub_key()), ) } @@ -209,8 +214,8 @@ impl AuthSingleSigAcl { "ACL configuration", [ FeltSchema::u32("num_trigger_procs").with_default(Felt::new(0)), - FeltSchema::u32("allow_unauthorized_output_notes").with_default(Felt::new(0)), - FeltSchema::u32("allow_unauthorized_input_notes").with_default(Felt::new(0)), + FeltSchema::bool("allow_unauthorized_output_notes").with_default(Felt::new(0)), + FeltSchema::bool("allow_unauthorized_input_notes").with_default(Felt::new(0)), FeltSchema::new_void(), ], ), @@ -221,7 +226,7 @@ impl AuthSingleSigAcl { pub fn auth_scheme_slot_schema() -> (StorageSlotName, StorageSlotSchema) { ( Self::scheme_id_slot().clone(), - StorageSlotSchema::value("Scheme ID", SchemaTypeId::auth_scheme()), + 
StorageSlotSchema::value("Scheme ID", SchemaType::auth_scheme()), ) } @@ -231,11 +236,28 @@ impl AuthSingleSigAcl { Self::trigger_procedure_roots_slot().clone(), StorageSlotSchema::map( "Trigger procedure roots", - SchemaTypeId::u32(), - SchemaTypeId::native_word(), + SchemaType::u32(), + SchemaType::native_word(), ), ) } + + /// Returns the [`AccountComponentMetadata`] for this component. + pub fn component_metadata() -> AccountComponentMetadata { + let storage_schema = StorageSchema::new(vec![ + Self::public_key_slot_schema(), + Self::auth_scheme_slot_schema(), + Self::config_slot_schema(), + Self::trigger_procedure_roots_slot_schema(), + ]) + .expect("storage schema should be valid"); + + AccountComponentMetadata::new(Self::NAME, AccountType::all()) + .with_description( + "Authentication component with procedure-based ACL using ECDSA K256 Keccak or Falcon512 Poseidon2 signature scheme", + ) + .with_storage_schema(storage_schema) + } } impl From for AccountComponent { @@ -274,7 +296,7 @@ impl From for AccountComponent { .auth_trigger_procedures .iter() .enumerate() - .map(|(i, proc_root)| (Word::from([i as u32, 0, 0, 0]), *proc_root)); + .map(|(i, proc_root)| (StorageMapKey::from_index(i as u32), *proc_root)); // Safe to unwrap because we know that the map keys are unique. 
storage_slots.push(StorageSlot::with_map( @@ -282,18 +304,7 @@ impl From for AccountComponent { StorageMap::with_entries(map_entries).unwrap(), )); - let storage_schema = StorageSchema::new(vec![ - AuthSingleSigAcl::public_key_slot_schema(), - AuthSingleSigAcl::auth_scheme_slot_schema(), - AuthSingleSigAcl::config_slot_schema(), - AuthSingleSigAcl::trigger_procedure_roots_slot_schema(), - ]) - .expect("storage schema should be valid"); - - let metadata = AccountComponentMetadata::new(AuthSingleSigAcl::NAME) - .with_description("Authentication component with procedure-based ACL using ECDSA K256 Keccak or Rpo Falcon 512 signature scheme") - .with_supports_all_types() - .with_storage_schema(storage_schema); + let metadata = AuthSingleSigAcl::component_metadata(); AccountComponent::new(singlesig_acl_library(), storage_slots, metadata).expect( "singlesig ACL component should satisfy the requirements of a valid account component", @@ -301,6 +312,9 @@ impl From for AccountComponent { } } +// TESTS +// ================================================================================================ + #[cfg(test)] mod tests { use miden_protocol::Word; @@ -335,7 +349,7 @@ mod tests { /// Parametrized test helper for ACL component testing fn test_acl_component(config: AclTestConfig) { let public_key = PublicKeyCommitment::from(Word::empty()); - let auth_scheme = AuthScheme::Falcon512Rpo; + let auth_scheme = AuthScheme::Falcon512Poseidon2; // Build the configuration let mut acl_config = AuthSingleSigAclConfig::new() diff --git a/crates/miden-standards/src/account/components/mod.rs b/crates/miden-standards/src/account/components/mod.rs index 80620f4413..a14d3ce523 100644 --- a/crates/miden-standards/src/account/components/mod.rs +++ b/crates/miden-standards/src/account/components/mod.rs @@ -1,11 +1,11 @@ use alloc::collections::BTreeSet; use alloc::vec::Vec; -use miden_processor::MastNodeExt; +use miden_processor::mast::MastNodeExt; use miden_protocol::Word; use 
miden_protocol::account::AccountProcedureRoot; use miden_protocol::assembly::{Library, LibraryExport}; -use miden_protocol::utils::Deserializable; +use miden_protocol::utils::serde::Deserializable; use miden_protocol::utils::sync::LazyLock; use crate::account::interface::AccountComponentInterface; @@ -22,6 +22,18 @@ static BASIC_WALLET_LIBRARY: LazyLock = LazyLock::new(|| { Library::read_from_bytes(bytes).expect("Shipped Basic Wallet library is well-formed") }); +// ACCESS LIBRARIES +// ================================================================================================ + +// Initialize the Ownable2Step library only once. +static OWNABLE2STEP_LIBRARY: LazyLock = LazyLock::new(|| { + let bytes = include_bytes!(concat!( + env!("OUT_DIR"), + "/assets/account_components/access/ownable2step.masl" + )); + Library::read_from_bytes(bytes).expect("Shipped Ownable2Step library is well-formed") +}); + // AUTH LIBRARIES // ================================================================================================ @@ -48,6 +60,15 @@ static MULTISIG_LIBRARY: LazyLock = LazyLock::new(|| { Library::read_from_bytes(bytes).expect("Shipped Multisig library is well-formed") }); +/// Initialize the Multisig PSM library only once. +static MULTISIG_PSM_LIBRARY: LazyLock = LazyLock::new(|| { + let bytes = include_bytes!(concat!( + env!("OUT_DIR"), + "/assets/account_components/auth/multisig_psm.masl" + )); + Library::read_from_bytes(bytes).expect("Shipped Multisig PSM library is well-formed") +}); + // Initialize the NoAuth library only once. 
static NO_AUTH_LIBRARY: LazyLock = LazyLock::new(|| { let bytes = @@ -76,23 +97,39 @@ static NETWORK_FUNGIBLE_FAUCET_LIBRARY: LazyLock = LazyLock::new(|| { Library::read_from_bytes(bytes).expect("Shipped Network Fungible Faucet library is well-formed") }); -// METADATA LIBRARIES -// ================================================================================================ +// Initialize the Mint Policy Owner Controlled library only once. +static MINT_POLICY_OWNER_CONTROLLED_LIBRARY: LazyLock = LazyLock::new(|| { + let bytes = include_bytes!(concat!( + env!("OUT_DIR"), + "/assets/account_components/mint_policies/owner_controlled.masl" + )); + Library::read_from_bytes(bytes) + .expect("Shipped Mint Policy Owner Controlled library is well-formed") +}); -// Initialize the Storage Schema library only once. -static STORAGE_SCHEMA_LIBRARY: LazyLock = LazyLock::new(|| { +// Initialize the Mint Policy Auth Controlled library only once. +static MINT_POLICY_AUTH_CONTROLLED_LIBRARY: LazyLock = LazyLock::new(|| { let bytes = include_bytes!(concat!( env!("OUT_DIR"), - "/assets/account_components/metadata/schema_commitment.masl" + "/assets/account_components/mint_policies/auth_controlled.masl" )); - Library::read_from_bytes(bytes).expect("Shipped Storage Schema library is well-formed") + Library::read_from_bytes(bytes) + .expect("Shipped Mint Policy Auth Controlled library is well-formed") }); +// METADATA LIBRARIES +// ================================================================================================ + /// Returns the Basic Wallet Library. pub fn basic_wallet_library() -> Library { BASIC_WALLET_LIBRARY.clone() } +/// Returns the Ownable2Step Library. +pub fn ownable2step_library() -> Library { + OWNABLE2STEP_LIBRARY.clone() +} + /// Returns the Basic Fungible Faucet Library. 
pub fn basic_fungible_faucet_library() -> Library { BASIC_FUNGIBLE_FAUCET_LIBRARY.clone() @@ -103,9 +140,14 @@ pub fn network_fungible_faucet_library() -> Library { NETWORK_FUNGIBLE_FAUCET_LIBRARY.clone() } -/// Returns the Storage Schema Library. -pub fn storage_schema_library() -> Library { - STORAGE_SCHEMA_LIBRARY.clone() +/// Returns the Mint Policy Owner Controlled Library. +pub fn owner_controlled_library() -> Library { + MINT_POLICY_OWNER_CONTROLLED_LIBRARY.clone() +} + +/// Returns the Mint Policy Auth Controlled Library. +pub fn auth_controlled_library() -> Library { + MINT_POLICY_AUTH_CONTROLLED_LIBRARY.clone() } /// Returns the Singlesig Library. @@ -123,6 +165,11 @@ pub fn multisig_library() -> Library { MULTISIG_LIBRARY.clone() } +/// Returns the Multisig PSM Library. +pub fn multisig_psm_library() -> Library { + MULTISIG_PSM_LIBRARY.clone() +} + /// Returns the NoAuth Library. pub fn no_auth_library() -> Library { NO_AUTH_LIBRARY.clone() @@ -140,6 +187,7 @@ pub enum StandardAccountComponent { AuthSingleSig, AuthSingleSigAcl, AuthMultisig, + AuthMultisigPsm, AuthNoAuth, } @@ -153,6 +201,7 @@ impl StandardAccountComponent { Self::AuthSingleSig => SINGLESIG_LIBRARY.as_ref(), Self::AuthSingleSigAcl => SINGLESIG_ACL_LIBRARY.as_ref(), Self::AuthMultisig => MULTISIG_LIBRARY.as_ref(), + Self::AuthMultisigPsm => MULTISIG_PSM_LIBRARY.as_ref(), Self::AuthNoAuth => NO_AUTH_LIBRARY.as_ref(), }; @@ -205,6 +254,9 @@ impl StandardAccountComponent { Self::AuthMultisig => { component_interface_vec.push(AccountComponentInterface::AuthMultisig) }, + Self::AuthMultisigPsm => { + component_interface_vec.push(AccountComponentInterface::AuthMultisigPsm) + }, Self::AuthNoAuth => { component_interface_vec.push(AccountComponentInterface::AuthNoAuth) }, @@ -223,6 +275,7 @@ impl StandardAccountComponent { Self::NetworkFungibleFaucet.extract_component(procedures_set, component_interface_vec); Self::AuthSingleSig.extract_component(procedures_set, component_interface_vec); 
Self::AuthSingleSigAcl.extract_component(procedures_set, component_interface_vec); + Self::AuthMultisigPsm.extract_component(procedures_set, component_interface_vec); Self::AuthMultisig.extract_component(procedures_set, component_interface_vec); Self::AuthNoAuth.extract_component(procedures_set, component_interface_vec); } diff --git a/crates/miden-standards/src/account/faucets/basic_fungible.rs b/crates/miden-standards/src/account/faucets/basic_fungible.rs index d2d7734dfc..71b94ce2b6 100644 --- a/crates/miden-standards/src/account/faucets/basic_fungible.rs +++ b/crates/miden-standards/src/account/faucets/basic_fungible.rs @@ -1,7 +1,7 @@ use miden_protocol::account::component::{ AccountComponentMetadata, FeltSchema, - SchemaTypeId, + SchemaType, StorageSchema, StorageSlotSchema, }; @@ -21,25 +21,28 @@ use super::{FungibleFaucetError, TokenMetadata}; use crate::account::AuthMethod; use crate::account::auth::{AuthSingleSigAcl, AuthSingleSigAclConfig}; use crate::account::components::basic_fungible_faucet_library; +use crate::account::mint_policies::AuthControlled; -/// The schema type ID for token symbols. -const TOKEN_SYMBOL_TYPE_ID: &str = "miden::standards::fungible_faucets::metadata::token_symbol"; +/// The schema type for token symbols. +const TOKEN_SYMBOL_TYPE: &str = "miden::standards::fungible_faucets::metadata::token_symbol"; use crate::account::interface::{AccountComponentInterface, AccountInterface, AccountInterfaceExt}; use crate::procedure_digest; // BASIC FUNGIBLE FAUCET ACCOUNT COMPONENT // ================================================================================================ -// Initialize the digest of the `distribute` procedure of the Basic Fungible Faucet only once. +// Initialize the digest of the `mint_and_send` procedure of the Basic Fungible Faucet only once. 
procedure_digest!( - BASIC_FUNGIBLE_FAUCET_DISTRIBUTE, - BasicFungibleFaucet::DISTRIBUTE_PROC_NAME, + BASIC_FUNGIBLE_FAUCET_MINT_AND_SEND, + BasicFungibleFaucet::NAME, + BasicFungibleFaucet::MINT_PROC_NAME, basic_fungible_faucet_library ); // Initialize the digest of the `burn` procedure of the Basic Fungible Faucet only once. procedure_digest!( BASIC_FUNGIBLE_FAUCET_BURN, + BasicFungibleFaucet::NAME, BasicFungibleFaucet::BURN_PROC_NAME, basic_fungible_faucet_library ); @@ -50,12 +53,12 @@ procedure_digest!( /// against this component, the `miden` library (i.e. /// [`ProtocolLib`](miden_protocol::ProtocolLib)) must be available to the assembler which is the /// case when using [`CodeBuilder`][builder]. The procedures of this component are: -/// - `distribute`, which mints an assets and create a note for the provided recipient. +/// - `mint_and_send`, which mints an assets and create a note for the provided recipient. /// - `burn`, which burns the provided asset. /// -/// The `distribute` procedure can be called from a transaction script and requires authentication -/// via the authentication component. The `burn` procedure can only be called from a note script -/// and requires the calling note to contain the asset to be burned. +/// The `mint_and_send` procedure can be called from a transaction script and requires +/// authentication via the authentication component. The `burn` procedure can only be called from a +/// note script and requires the calling note to contain the asset to be burned. /// This component must be combined with an authentication component. /// /// This component supports accounts of type [`AccountType::FungibleFaucet`]. @@ -74,13 +77,13 @@ impl BasicFungibleFaucet { // -------------------------------------------------------------------------------------------- /// The name of the component. 
- pub const NAME: &'static str = "miden::basic_fungible_faucet"; + pub const NAME: &'static str = "miden::standards::components::faucets::basic_fungible_faucet"; /// The maximum number of decimals supported by the component. pub const MAX_DECIMALS: u8 = TokenMetadata::MAX_DECIMALS; - const DISTRIBUTE_PROC_NAME: &str = "basic_fungible_faucet::distribute"; - const BURN_PROC_NAME: &str = "basic_fungible_faucet::burn"; + const MINT_PROC_NAME: &str = "mint_and_send"; + const BURN_PROC_NAME: &str = "burn"; // CONSTRUCTORS // -------------------------------------------------------------------------------------------- @@ -148,7 +151,7 @@ impl BasicFungibleFaucet { /// Returns the storage slot schema for the metadata slot. pub fn metadata_slot_schema() -> (StorageSlotName, StorageSlotSchema) { - let token_symbol_type = SchemaTypeId::new(TOKEN_SYMBOL_TYPE_ID).expect("valid type id"); + let token_symbol_type = SchemaType::new(TOKEN_SYMBOL_TYPE).expect("valid type"); ( Self::metadata_slot().clone(), StorageSlotSchema::value( @@ -169,7 +172,7 @@ impl BasicFungibleFaucet { } /// Returns the symbol of the faucet. - pub fn symbol(&self) -> TokenSymbol { + pub fn symbol(&self) -> &TokenSymbol { self.metadata.symbol() } @@ -193,9 +196,9 @@ impl BasicFungibleFaucet { self.metadata.token_supply() } - /// Returns the digest of the `distribute` account procedure. - pub fn distribute_digest() -> Word { - *BASIC_FUNGIBLE_FAUCET_DISTRIBUTE + /// Returns the digest of the `mint_and_send` account procedure. + pub fn mint_and_send_digest() -> Word { + *BASIC_FUNGIBLE_FAUCET_MINT_AND_SEND } /// Returns the digest of the `burn` account procedure. @@ -203,6 +206,16 @@ impl BasicFungibleFaucet { *BASIC_FUNGIBLE_FAUCET_BURN } + /// Returns the [`AccountComponentMetadata`] for this component. 
+ pub fn component_metadata() -> AccountComponentMetadata { + let storage_schema = StorageSchema::new([Self::metadata_slot_schema()]) + .expect("storage schema should be valid"); + + AccountComponentMetadata::new(Self::NAME, [AccountType::FungibleFaucet]) + .with_description("Basic fungible faucet component for minting and burning tokens") + .with_storage_schema(storage_schema) + } + // MUTATORS // -------------------------------------------------------------------------------------------- @@ -221,14 +234,7 @@ impl BasicFungibleFaucet { impl From for AccountComponent { fn from(faucet: BasicFungibleFaucet) -> Self { let storage_slot = faucet.metadata.into(); - - let storage_schema = StorageSchema::new([BasicFungibleFaucet::metadata_slot_schema()]) - .expect("storage schema should be valid"); - - let metadata = AccountComponentMetadata::new(BasicFungibleFaucet::NAME) - .with_description("Basic fungible faucet component for minting and burning tokens") - .with_supported_type(AccountType::FungibleFaucet) - .with_storage_schema(storage_schema); + let metadata = BasicFungibleFaucet::component_metadata(); AccountComponent::new(basic_fungible_faucet_library(), vec![storage_slot], metadata) .expect("basic fungible faucet component should satisfy the requirements of a valid account component") @@ -260,17 +266,18 @@ impl TryFrom<&Account> for BasicFungibleFaucet { /// decimals, max supply). /// /// The basic faucet interface exposes two procedures: -/// - `distribute`, which mints an assets and create a note for the provided recipient. +/// - `mint_and_send`, which mints an assets and create a note for the provided recipient. /// - `burn`, which burns the provided asset. /// -/// The `distribute` procedure can be called from a transaction script and requires authentication -/// via the specified authentication scheme. The `burn` procedure can only be called from a note -/// script and requires the calling note to contain the asset to be burned. 
+/// The `mint_and_send` procedure can be called from a transaction script and requires +/// authentication via the specified authentication scheme. The `burn` procedure can only be called +/// from a note script and requires the calling note to contain the asset to be burned. /// /// The storage layout of the faucet account is defined by the combination of the following /// components (see their docs for details): /// - [`BasicFungibleFaucet`] /// - [`AuthSingleSigAcl`] +/// - [`AuthControlled`] pub fn create_basic_fungible_faucet( init_seed: [u8; 32], symbol: TokenSymbol, @@ -279,14 +286,14 @@ pub fn create_basic_fungible_faucet( account_storage_mode: AccountStorageMode, auth_method: AuthMethod, ) -> Result { - let distribute_proc_root = BasicFungibleFaucet::distribute_digest(); + let mint_proc_root = BasicFungibleFaucet::mint_and_send_digest(); let auth_component: AccountComponent = match auth_method { AuthMethod::SingleSig { approver: (pub_key, auth_scheme) } => AuthSingleSigAcl::new( pub_key, auth_scheme, AuthSingleSigAclConfig::new() - .with_auth_trigger_procedures(vec![distribute_proc_root]) + .with_auth_trigger_procedures(vec![mint_proc_root]) .with_allow_unauthorized_input_notes(true), ) .map_err(FungibleFaucetError::AccountError)? @@ -314,6 +321,7 @@ pub fn create_basic_fungible_faucet( .storage_mode(account_storage_mode) .with_auth_component(auth_component) .with_component(BasicFungibleFaucet::new(symbol, decimals, max_supply)?) 
+ .with_component(AuthControlled::allow_all()) .build() .map_err(FungibleFaucetError::AccountError)?; @@ -326,8 +334,8 @@ pub fn create_basic_fungible_faucet( #[cfg(test)] mod tests { use assert_matches::assert_matches; + use miden_protocol::Word; use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; - use miden_protocol::{FieldElement, ONE, Word}; use super::{ AccountBuilder, @@ -345,9 +353,9 @@ mod tests { #[test] fn faucet_contract_creation() { - let pub_key_word = Word::new([ONE; 4]); + let pub_key_word = Word::new([Felt::ONE; 4]); let auth_method: AuthMethod = AuthMethod::SingleSig { - approver: (pub_key_word.into(), AuthScheme::Falcon512Rpo), + approver: (pub_key_word.into(), AuthScheme::Falcon512Poseidon2), }; // we need to use an initial seed to create the wallet account @@ -362,9 +370,10 @@ mod tests { let decimals = 2u8; let storage_mode = AccountStorageMode::Private; + let token_symbol_felt = token_symbol.as_element(); let faucet_account = create_basic_fungible_faucet( init_seed, - token_symbol, + token_symbol.clone(), decimals, max_supply, storage_mode, @@ -381,15 +390,15 @@ mod tests { // The config slot of the auth component stores: // [num_trigger_procs, allow_unauthorized_output_notes, allow_unauthorized_input_notes, 0]. // - // With 1 trigger procedure (distribute), allow_unauthorized_output_notes=false, and + // With 1 trigger procedure (mint_and_send), allow_unauthorized_output_notes=false, and // allow_unauthorized_input_notes=true, this should be [1, 0, 1, 0]. assert_eq!( faucet_account.storage().get_item(AuthSingleSigAcl::config_slot()).unwrap(), [Felt::ONE, Felt::ZERO, Felt::ONE, Felt::ZERO].into() ); - // The procedure root map should contain the distribute procedure root. - let distribute_root = BasicFungibleFaucet::distribute_digest(); + // The procedure root map should contain the mint_and_send procedure root. 
+ let mint_root = BasicFungibleFaucet::mint_and_send_digest(); assert_eq!( faucet_account .storage() @@ -398,14 +407,14 @@ mod tests { [Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::ZERO].into() ) .unwrap(), - distribute_root + mint_root ); // Check that faucet metadata was initialized to the given values. // Storage layout: [token_supply, max_supply, decimals, symbol] assert_eq!( faucet_account.storage().get_item(BasicFungibleFaucet::metadata_slot()).unwrap(), - [Felt::ZERO, Felt::new(123), Felt::new(2), token_symbol.into()].into() + [Felt::ZERO, Felt::new(123), Felt::new(2), token_symbol_felt].into() ); assert!(faucet_account.is_faucet()); @@ -414,7 +423,7 @@ mod tests { // Verify the faucet can be extracted and has correct metadata let faucet_component = BasicFungibleFaucet::try_from(faucet_account.clone()).unwrap(); - assert_eq!(faucet_component.symbol(), token_symbol); + assert_eq!(faucet_component.symbol(), &token_symbol); assert_eq!(faucet_component.decimals(), decimals); assert_eq!(faucet_component.max_supply(), max_supply); assert_eq!(faucet_component.token_supply(), Felt::ZERO); @@ -432,16 +441,19 @@ mod tests { let faucet_account = AccountBuilder::new(mock_seed) .account_type(AccountType::FungibleFaucet) .with_component( - BasicFungibleFaucet::new(token_symbol, 10, Felt::new(100)) + BasicFungibleFaucet::new(token_symbol.clone(), 10, Felt::new(100)) .expect("failed to create a fungible faucet component"), ) - .with_auth_component(AuthSingleSig::new(mock_public_key, AuthScheme::Falcon512Rpo)) + .with_auth_component(AuthSingleSig::new( + mock_public_key, + AuthScheme::Falcon512Poseidon2, + )) .build_existing() .expect("failed to create wallet account"); let basic_ff = BasicFungibleFaucet::try_from(faucet_account) .expect("basic fungible faucet creation failed"); - assert_eq!(basic_ff.symbol(), token_symbol); + assert_eq!(basic_ff.symbol(), &token_symbol); assert_eq!(basic_ff.decimals(), 10); assert_eq!(basic_ff.max_supply(), Felt::new(100)); 
assert_eq!(basic_ff.token_supply(), Felt::ZERO); @@ -449,7 +461,7 @@ mod tests { // invalid account: basic fungible faucet component is missing let invalid_faucet_account = AccountBuilder::new(mock_seed) .account_type(AccountType::FungibleFaucet) - .with_auth_component(AuthSingleSig::new(mock_public_key, AuthScheme::Falcon512Rpo)) + .with_auth_component(AuthSingleSig::new(mock_public_key, AuthScheme::Falcon512Poseidon2)) // we need to add some other component so the builder doesn't fail .with_component(BasicWallet) .build_existing() @@ -464,7 +476,7 @@ mod tests { /// Check that the obtaining of the basic fungible faucet procedure digests does not panic. #[test] fn get_faucet_procedures() { - let _distribute_digest = BasicFungibleFaucet::distribute_digest(); + let _mint_and_send_digest = BasicFungibleFaucet::mint_and_send_digest(); let _burn_digest = BasicFungibleFaucet::burn_digest(); } } diff --git a/crates/miden-standards/src/account/faucets/mod.rs b/crates/miden-standards/src/account/faucets/mod.rs index 6b66c45697..df1f3adc16 100644 --- a/crates/miden-standards/src/account/faucets/mod.rs +++ b/crates/miden-standards/src/account/faucets/mod.rs @@ -4,6 +4,8 @@ use miden_protocol::account::StorageSlotName; use miden_protocol::errors::{AccountError, TokenSymbolError}; use thiserror::Error; +use crate::account::access::Ownable2StepError; + mod basic_fungible; mod network_fungible; mod token_metadata; @@ -46,8 +48,12 @@ pub enum FungibleFaucetError { }, #[error("unsupported authentication method: {0}")] UnsupportedAuthMethod(String), + #[error("unsupported access control method: {0}")] + UnsupportedAccessControl(String), #[error("account creation failed")] AccountError(#[source] AccountError), #[error("account is not a fungible faucet account")] NotAFungibleFaucetAccount, + #[error("failed to read ownership data from storage")] + OwnershipError(#[source] Ownable2StepError), } diff --git a/crates/miden-standards/src/account/faucets/network_fungible.rs 
b/crates/miden-standards/src/account/faucets/network_fungible.rs index db53a10dff..bb7ba4e6be 100644 --- a/crates/miden-standards/src/account/faucets/network_fungible.rs +++ b/crates/miden-standards/src/account/faucets/network_fungible.rs @@ -1,7 +1,7 @@ use miden_protocol::account::component::{ AccountComponentMetadata, FeltSchema, - SchemaTypeId, + SchemaType, StorageSchema, StorageSlotSchema, }; @@ -9,70 +9,68 @@ use miden_protocol::account::{ Account, AccountBuilder, AccountComponent, - AccountId, AccountStorage, AccountStorageMode, AccountType, - StorageSlot, StorageSlotName, }; use miden_protocol::asset::TokenSymbol; -use miden_protocol::utils::sync::LazyLock; use miden_protocol::{Felt, Word}; use super::{FungibleFaucetError, TokenMetadata}; +use crate::account::access::AccessControl; use crate::account::auth::NoAuth; use crate::account::components::network_fungible_faucet_library; - -/// The schema type ID for token symbols. -const TOKEN_SYMBOL_TYPE_ID: &str = "miden::standards::fungible_faucets::metadata::token_symbol"; use crate::account::interface::{AccountComponentInterface, AccountInterface, AccountInterfaceExt}; +use crate::account::mint_policies::OwnerControlled; use crate::procedure_digest; +/// The schema type for token symbols. +const TOKEN_SYMBOL_TYPE: &str = "miden::standards::fungible_faucets::metadata::token_symbol"; + // NETWORK FUNGIBLE FAUCET ACCOUNT COMPONENT // ================================================================================================ -// Initialize the digest of the `distribute` procedure of the Network Fungible Faucet only once. +// Initialize the digest of the `mint_and_send` procedure of the Network Fungible Faucet only once. 
procedure_digest!( - NETWORK_FUNGIBLE_FAUCET_DISTRIBUTE, - NetworkFungibleFaucet::DISTRIBUTE_PROC_NAME, + NETWORK_FUNGIBLE_FAUCET_MINT_AND_SEND, + NetworkFungibleFaucet::NAME, + NetworkFungibleFaucet::MINT_PROC_NAME, network_fungible_faucet_library ); // Initialize the digest of the `burn` procedure of the Network Fungible Faucet only once. procedure_digest!( NETWORK_FUNGIBLE_FAUCET_BURN, + NetworkFungibleFaucet::NAME, NetworkFungibleFaucet::BURN_PROC_NAME, network_fungible_faucet_library ); -static OWNER_CONFIG_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::access::ownable::owner_config") - .expect("storage slot name should be valid") -}); - /// An [`AccountComponent`] implementing a network fungible faucet. /// /// It reexports the procedures from `miden::standards::faucets::network_fungible`. When linking /// against this component, the `miden` library (i.e. /// [`ProtocolLib`](miden_protocol::ProtocolLib)) must be available to the assembler which is the /// case when using [`CodeBuilder`][builder]. The procedures of this component are: -/// - `distribute`, which mints an assets and create a note for the provided recipient. +/// - `mint_and_send`, which mints an assets and create a note for the provided recipient. /// - `burn`, which burns the provided asset. /// -/// Both `distribute` and `burn` can only be called from note scripts. `distribute` requires +/// Both `mint_and_send` and `burn` can only be called from note scripts. `mint_and_send` requires /// authentication while `burn` does not require authentication and can be called by anyone. /// Thus, this component must be combined with a component providing authentication. /// +/// This component relies on [`crate::account::access::Ownable2Step`] for ownership checks in +/// `mint_and_send`. When building an account with this component, +/// [`crate::account::access::Ownable2Step`] must also be included. 
+/// /// ## Storage Layout /// /// - [`Self::metadata_slot`]: Fungible faucet metadata. -/// - [`Self::owner_config_slot`]: The owner account of this network faucet. /// /// [builder]: crate::code_builder::CodeBuilder pub struct NetworkFungibleFaucet { metadata: TokenMetadata, - owner_account_id: AccountId, } impl NetworkFungibleFaucet { @@ -80,13 +78,13 @@ impl NetworkFungibleFaucet { // -------------------------------------------------------------------------------------------- /// The name of the component. - pub const NAME: &'static str = "miden::network_fungible_faucet"; + pub const NAME: &'static str = "miden::standards::components::faucets::network_fungible_faucet"; /// The maximum number of decimals supported by the component. pub const MAX_DECIMALS: u8 = TokenMetadata::MAX_DECIMALS; - const DISTRIBUTE_PROC_NAME: &str = "network_fungible_faucet::distribute"; - const BURN_PROC_NAME: &str = "network_fungible_faucet::burn"; + const MINT_PROC_NAME: &str = "mint_and_send"; + const BURN_PROC_NAME: &str = "burn"; // CONSTRUCTORS // -------------------------------------------------------------------------------------------- @@ -102,18 +100,17 @@ impl NetworkFungibleFaucet { symbol: TokenSymbol, decimals: u8, max_supply: Felt, - owner_account_id: AccountId, ) -> Result { let metadata = TokenMetadata::new(symbol, decimals, max_supply)?; - Ok(Self { metadata, owner_account_id }) + Ok(Self { metadata }) } /// Creates a new [`NetworkFungibleFaucet`] component from the given [`TokenMetadata`]. /// /// This is a convenience constructor that allows creating a faucet from pre-validated /// metadata. 
- pub fn from_metadata(metadata: TokenMetadata, owner_account_id: AccountId) -> Self { - Self { metadata, owner_account_id } + pub fn from_metadata(metadata: TokenMetadata) -> Self { + Self { metadata } } /// Attempts to create a new [`NetworkFungibleFaucet`] component from the associated account @@ -144,21 +141,7 @@ impl NetworkFungibleFaucet { // Read token metadata from storage let metadata = TokenMetadata::try_from(storage)?; - // obtain owner account ID from the next storage slot - let owner_account_id_word: Word = storage - .get_item(NetworkFungibleFaucet::owner_config_slot()) - .map_err(|err| FungibleFaucetError::StorageLookupFailed { - slot_name: NetworkFungibleFaucet::owner_config_slot().clone(), - source: err, - })?; - - // Convert Word back to AccountId - // Storage format: [0, 0, suffix, prefix] - let prefix = owner_account_id_word[3]; - let suffix = owner_account_id_word[2]; - let owner_account_id = AccountId::new_unchecked([prefix, suffix]); - - Ok(Self { metadata, owner_account_id }) + Ok(Self { metadata }) } // PUBLIC ACCESSORS @@ -169,15 +152,9 @@ impl NetworkFungibleFaucet { TokenMetadata::metadata_slot() } - /// Returns the [`StorageSlotName`] where the [`NetworkFungibleFaucet`]'s owner configuration is - /// stored. - pub fn owner_config_slot() -> &'static StorageSlotName { - &OWNER_CONFIG_SLOT_NAME - } - /// Returns the storage slot schema for the metadata slot. pub fn metadata_slot_schema() -> (StorageSlotName, StorageSlotSchema) { - let token_symbol_type = SchemaTypeId::new(TOKEN_SYMBOL_TYPE_ID).expect("valid type id"); + let token_symbol_type = SchemaType::new(TOKEN_SYMBOL_TYPE).expect("valid type"); ( Self::metadata_slot().clone(), StorageSlotSchema::value( @@ -192,29 +169,13 @@ impl NetworkFungibleFaucet { ) } - /// Returns the storage slot schema for the owner configuration slot. 
- pub fn owner_config_slot_schema() -> (StorageSlotName, StorageSlotSchema) { - ( - Self::owner_config_slot().clone(), - StorageSlotSchema::value( - "Owner account configuration", - [ - FeltSchema::new_void(), - FeltSchema::new_void(), - FeltSchema::felt("owner_suffix"), - FeltSchema::felt("owner_prefix"), - ], - ), - ) - } - /// Returns the token metadata. pub fn metadata(&self) -> &TokenMetadata { &self.metadata } /// Returns the symbol of the faucet. - pub fn symbol(&self) -> TokenSymbol { + pub fn symbol(&self) -> &TokenSymbol { self.metadata.symbol() } @@ -238,14 +199,9 @@ impl NetworkFungibleFaucet { self.metadata.token_supply() } - /// Returns the owner account ID of the faucet. - pub fn owner_account_id(&self) -> AccountId { - self.owner_account_id - } - - /// Returns the digest of the `distribute` account procedure. - pub fn distribute_digest() -> Word { - *NETWORK_FUNGIBLE_FAUCET_DISTRIBUTE + /// Returns the digest of the `mint_and_send` account procedure. + pub fn mint_and_send_digest() -> Word { + *NETWORK_FUNGIBLE_FAUCET_MINT_AND_SEND } /// Returns the digest of the `burn` account procedure. @@ -266,40 +222,26 @@ impl NetworkFungibleFaucet { self.metadata = self.metadata.with_token_supply(token_supply)?; Ok(self) } + + /// Returns the [`AccountComponentMetadata`] for this component. + pub fn component_metadata() -> AccountComponentMetadata { + let storage_schema = StorageSchema::new([Self::metadata_slot_schema()]) + .expect("storage schema should be valid"); + + AccountComponentMetadata::new(Self::NAME, [AccountType::FungibleFaucet]) + .with_description("Network fungible faucet component for minting and burning tokens") + .with_storage_schema(storage_schema) + } } impl From for AccountComponent { fn from(network_faucet: NetworkFungibleFaucet) -> Self { let metadata_slot = network_faucet.metadata.into(); - - // Convert AccountId into its Word encoding for storage. 
- let owner_account_id_word: Word = [ - Felt::new(0), - Felt::new(0), - network_faucet.owner_account_id.suffix(), - network_faucet.owner_account_id.prefix().as_felt(), - ] - .into(); - - let owner_slot = StorageSlot::with_value( - NetworkFungibleFaucet::owner_config_slot().clone(), - owner_account_id_word, - ); - - let storage_schema = StorageSchema::new([ - NetworkFungibleFaucet::metadata_slot_schema(), - NetworkFungibleFaucet::owner_config_slot_schema(), - ]) - .expect("storage schema should be valid"); - - let metadata = AccountComponentMetadata::new(NetworkFungibleFaucet::NAME) - .with_description("Network fungible faucet component for minting and burning tokens") - .with_supported_type(AccountType::FungibleFaucet) - .with_storage_schema(storage_schema); + let metadata = NetworkFungibleFaucet::component_metadata(); AccountComponent::new( network_fungible_faucet_library(), - vec![metadata_slot, owner_slot], + vec![metadata_slot], metadata, ) .expect("network fungible faucet component should satisfy the requirements of a valid account component") @@ -327,13 +269,13 @@ impl TryFrom<&Account> for NetworkFungibleFaucet { } /// Creates a new faucet account with network fungible faucet interface and provided metadata -/// (token symbol, decimals, max supply, owner account ID). +/// (token symbol, decimals, max supply) and access control. /// /// The network faucet interface exposes two procedures: -/// - `distribute`, which mints an assets and create a note for the provided recipient. +/// - `mint_and_send`, which mints an assets and create a note for the provided recipient. /// - `burn`, which burns the provided asset. /// -/// Both `distribute` and `burn` can only be called from note scripts. `distribute` requires +/// Both `mint_and_send` and `burn` can only be called from note scripts. `mint_and_send` requires /// authentication using the NoAuth scheme. `burn` does not require authentication and can be /// called by anyone. 
/// @@ -341,24 +283,88 @@ impl TryFrom<&Account> for NetworkFungibleFaucet { /// - [`AccountStorageMode::Network`] for storage /// - [`NoAuth`] for authentication /// -/// The storage layout of the faucet account is documented on the [`NetworkFungibleFaucet`] type and +/// The storage layout of the faucet account is documented on the [`NetworkFungibleFaucet`] and +/// [`OwnerControlled`] and [`crate::account::access::Ownable2Step`] component types and /// contains no additional storage slots for its auth ([`NoAuth`]). pub fn create_network_fungible_faucet( init_seed: [u8; 32], symbol: TokenSymbol, decimals: u8, max_supply: Felt, - owner_account_id: AccountId, + access_control: AccessControl, ) -> Result { + // Validate that access_control is Ownable2Step, as this faucet depends on it. + // When new variants are added to AccessControl, update this match to either support + // them or return Err(FungibleFaucetError::UnsupportedAccessControl). + match access_control { + AccessControl::Ownable2Step { .. } => {}, + #[allow(unreachable_patterns)] + _ => { + return Err(FungibleFaucetError::UnsupportedAccessControl( + "network fungible faucets require Ownable2Step access control".into(), + )); + }, + } + let auth_component: AccountComponent = NoAuth::new().into(); let account = AccountBuilder::new(init_seed) .account_type(AccountType::FungibleFaucet) .storage_mode(AccountStorageMode::Network) .with_auth_component(auth_component) - .with_component(NetworkFungibleFaucet::new(symbol, decimals, max_supply, owner_account_id)?) + .with_component(NetworkFungibleFaucet::new(symbol, decimals, max_supply)?) 
+ .with_component(access_control) + .with_component(OwnerControlled::owner_only()) .build() .map_err(FungibleFaucetError::AccountError)?; Ok(account) } + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use miden_protocol::account::{AccountId, AccountIdVersion, AccountStorageMode, AccountType}; + + use super::*; + use crate::account::access::Ownable2Step; + + #[test] + fn test_create_network_fungible_faucet() { + let init_seed = [7u8; 32]; + let symbol = TokenSymbol::new("NET").expect("token symbol should be valid"); + let decimals = 8u8; + let max_supply = Felt::new(1_000); + + let owner = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + + let account = create_network_fungible_faucet( + init_seed, + symbol.clone(), + decimals, + max_supply, + AccessControl::Ownable2Step { owner }, + ) + .expect("network faucet creation should succeed"); + + let expected_owner_word = Ownable2Step::new(owner).to_word(); + assert_eq!( + account.storage().get_item(Ownable2Step::slot_name()).unwrap(), + expected_owner_word + ); + + let faucet = NetworkFungibleFaucet::try_from(&account) + .expect("network fungible faucet should be extractable from account"); + assert_eq!(faucet.symbol(), &symbol); + assert_eq!(faucet.decimals(), decimals); + assert_eq!(faucet.max_supply(), max_supply); + assert_eq!(faucet.token_supply(), Felt::ZERO); + } +} diff --git a/crates/miden-standards/src/account/faucets/token_metadata.rs b/crates/miden-standards/src/account/faucets/token_metadata.rs index 37fb74c562..bdca915fa5 100644 --- a/crates/miden-standards/src/account/faucets/token_metadata.rs +++ b/crates/miden-standards/src/account/faucets/token_metadata.rs @@ -1,7 +1,7 @@ use miden_protocol::account::{AccountStorage, StorageSlot, StorageSlotName}; use miden_protocol::asset::{FungibleAsset, TokenSymbol}; use 
miden_protocol::utils::sync::LazyLock; -use miden_protocol::{Felt, FieldElement, Word}; +use miden_protocol::{Felt, Word}; use super::FungibleFaucetError; @@ -26,7 +26,7 @@ static METADATA_SLOT_NAME: LazyLock = LazyLock::new(|| { /// /// The metadata is stored in a single storage slot as: /// `[token_supply, max_supply, decimals, symbol]` -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone)] pub struct TokenMetadata { token_supply: Felt, max_supply: Felt, @@ -78,17 +78,17 @@ impl TokenMetadata { }); } - if max_supply.as_int() > FungibleAsset::MAX_AMOUNT { + if max_supply.as_canonical_u64() > FungibleAsset::MAX_AMOUNT { return Err(FungibleFaucetError::MaxSupplyTooLarge { - actual: max_supply.as_int(), + actual: max_supply.as_canonical_u64(), max: FungibleAsset::MAX_AMOUNT, }); } - if token_supply.as_int() > max_supply.as_int() { + if token_supply.as_canonical_u64() > max_supply.as_canonical_u64() { return Err(FungibleFaucetError::TokenSupplyExceedsMaxSupply { - token_supply: token_supply.as_int(), - max_supply: max_supply.as_int(), + token_supply: token_supply.as_canonical_u64(), + max_supply: max_supply.as_canonical_u64(), }); } @@ -124,8 +124,8 @@ impl TokenMetadata { } /// Returns the token symbol. - pub fn symbol(&self) -> TokenSymbol { - self.symbol + pub fn symbol(&self) -> &TokenSymbol { + &self.symbol } // MUTATORS @@ -138,10 +138,10 @@ impl TokenMetadata { /// Returns an error if: /// - the token supply exceeds the max supply. 
pub fn with_token_supply(mut self, token_supply: Felt) -> Result { - if token_supply.as_int() > self.max_supply.as_int() { + if token_supply.as_canonical_u64() > self.max_supply.as_canonical_u64() { return Err(FungibleFaucetError::TokenSupplyExceedsMaxSupply { - token_supply: token_supply.as_int(), - max_supply: self.max_supply.as_int(), + token_supply: token_supply.as_canonical_u64(), + max_supply: self.max_supply.as_canonical_u64(), }); } @@ -166,11 +166,12 @@ impl TryFrom for TokenMetadata { let symbol = TokenSymbol::try_from(token_symbol).map_err(FungibleFaucetError::InvalidTokenSymbol)?; - let decimals = - decimals.as_int().try_into().map_err(|_| FungibleFaucetError::TooManyDecimals { - actual: decimals.as_int(), + let decimals = decimals.as_canonical_u64().try_into().map_err(|_| { + FungibleFaucetError::TooManyDecimals { + actual: decimals.as_canonical_u64(), max: Self::MAX_DECIMALS, - })?; + } + })?; Self::with_supply(symbol, decimals, max_supply, token_supply) } @@ -183,7 +184,7 @@ impl From for Word { metadata.token_supply, metadata.max_supply, Felt::from(metadata.decimals), - metadata.symbol.into(), + metadata.symbol.as_element(), ]) } } @@ -236,7 +237,7 @@ impl TryFrom<&AccountStorage> for TokenMetadata { #[cfg(test)] mod tests { use miden_protocol::asset::TokenSymbol; - use miden_protocol::{Felt, FieldElement, Word}; + use miden_protocol::{Felt, Word}; use super::*; @@ -246,9 +247,9 @@ mod tests { let decimals = 8u8; let max_supply = Felt::new(1_000_000); - let metadata = TokenMetadata::new(symbol, decimals, max_supply).unwrap(); + let metadata = TokenMetadata::new(symbol.clone(), decimals, max_supply).unwrap(); - assert_eq!(metadata.symbol(), symbol); + assert_eq!(metadata.symbol(), &symbol); assert_eq!(metadata.decimals(), decimals); assert_eq!(metadata.max_supply(), max_supply); assert_eq!(metadata.token_supply(), Felt::ZERO); @@ -262,9 +263,9 @@ mod tests { let token_supply = Felt::new(500_000); let metadata = - TokenMetadata::with_supply(symbol, 
decimals, max_supply, token_supply).unwrap(); + TokenMetadata::with_supply(symbol.clone(), decimals, max_supply, token_supply).unwrap(); - assert_eq!(metadata.symbol(), symbol); + assert_eq!(metadata.symbol(), &symbol); assert_eq!(metadata.decimals(), decimals); assert_eq!(metadata.max_supply(), max_supply); assert_eq!(metadata.token_supply(), token_supply); @@ -296,6 +297,7 @@ mod tests { #[test] fn token_metadata_to_word() { let symbol = TokenSymbol::new("POL").unwrap(); + let symbol_felt = symbol.as_element(); let decimals = 2u8; let max_supply = Felt::new(123); @@ -306,7 +308,7 @@ mod tests { assert_eq!(word[0], Felt::ZERO); // token_supply assert_eq!(word[1], max_supply); assert_eq!(word[2], Felt::from(decimals)); - assert_eq!(word[3], symbol.into()); + assert_eq!(word[3], symbol_felt); } #[test] @@ -315,12 +317,12 @@ mod tests { let decimals = 2u8; let max_supply = Felt::new(123); - let original = TokenMetadata::new(symbol, decimals, max_supply).unwrap(); + let original = TokenMetadata::new(symbol.clone(), decimals, max_supply).unwrap(); let slot: StorageSlot = original.into(); let restored = TokenMetadata::try_from(&slot).unwrap(); - assert_eq!(restored.symbol(), symbol); + assert_eq!(restored.symbol(), &symbol); assert_eq!(restored.decimals(), decimals); assert_eq!(restored.max_supply(), max_supply); assert_eq!(restored.token_supply(), Felt::ZERO); @@ -334,11 +336,11 @@ mod tests { let token_supply = Felt::new(500); let original = - TokenMetadata::with_supply(symbol, decimals, max_supply, token_supply).unwrap(); + TokenMetadata::with_supply(symbol.clone(), decimals, max_supply, token_supply).unwrap(); let word: Word = original.into(); let restored = TokenMetadata::try_from(word).unwrap(); - assert_eq!(restored.symbol(), symbol); + assert_eq!(restored.symbol(), &symbol); assert_eq!(restored.decimals(), decimals); assert_eq!(restored.max_supply(), max_supply); assert_eq!(restored.token_supply(), token_supply); diff --git 
a/crates/miden-standards/src/account/interface/component.rs b/crates/miden-standards/src/account/interface/component.rs index b92d993c93..6527767f3f 100644 --- a/crates/miden-standards/src/account/interface/component.rs +++ b/crates/miden-standards/src/account/interface/component.rs @@ -7,7 +7,7 @@ use miden_protocol::note::PartialNote; use miden_protocol::{Felt, Word}; use crate::AuthMethod; -use crate::account::auth::{AuthMultisig, AuthSingleSig, AuthSingleSigAcl}; +use crate::account::auth::{AuthMultisig, AuthMultisigPsm, AuthSingleSig, AuthSingleSigAcl}; use crate::account::interface::AccountInterfaceError; // ACCOUNT COMPONENT INTERFACE @@ -33,6 +33,9 @@ pub enum AccountComponentInterface { /// Exposes procedures from the /// [`AuthMultisig`][crate::account::auth::AuthMultisig] module. AuthMultisig, + /// Exposes procedures from the + /// [`AuthMultisigPsm`][crate::account::auth::AuthMultisigPsm] module. + AuthMultisigPsm, /// Exposes procedures from the [`NoAuth`][crate::account::auth::NoAuth] module. 
/// /// This authentication scheme provides no cryptographic authentication and only increments @@ -61,6 +64,7 @@ impl AccountComponentInterface { AccountComponentInterface::AuthSingleSig => "SingleSig".to_string(), AccountComponentInterface::AuthSingleSigAcl => "SingleSig ACL".to_string(), AccountComponentInterface::AuthMultisig => "Multisig".to_string(), + AccountComponentInterface::AuthMultisigPsm => "Multisig PSM".to_string(), AccountComponentInterface::AuthNoAuth => "No Auth".to_string(), AccountComponentInterface::Custom(proc_root_vec) => { let result = proc_root_vec @@ -82,6 +86,7 @@ impl AccountComponentInterface { AccountComponentInterface::AuthSingleSig | AccountComponentInterface::AuthSingleSigAcl | AccountComponentInterface::AuthMultisig + | AccountComponentInterface::AuthMultisigPsm | AccountComponentInterface::AuthNoAuth ) } @@ -107,6 +112,14 @@ impl AccountComponentInterface { AuthMultisig::approver_scheme_ids_slot(), )] }, + AccountComponentInterface::AuthMultisigPsm => { + vec![extract_multisig_auth_method( + storage, + AuthMultisigPsm::threshold_config_slot(), + AuthMultisigPsm::approver_public_keys_slot(), + AuthMultisigPsm::approver_scheme_ids_slot(), + )] + }, AccountComponentInterface::AuthNoAuth => vec![AuthMethod::NoAuth], _ => vec![], // Non-auth components return empty vector } @@ -141,7 +154,7 @@ impl AccountComponentInterface { /// push.{note information} /// /// push.{asset amount} - /// call.::miden::standards::faucets::basic_fungible::distribute dropw dropw drop + /// call.::miden::standards::faucets::basic_fungible::mint_and_send dropw dropw drop /// ``` /// /// # Errors: @@ -149,7 +162,7 @@ impl AccountComponentInterface { /// - the interface does not support the generation of the standard `send_note` procedure. /// - the sender of the note isn't the account for which the script is being built. /// - the note created by the faucet doesn't contain exactly one asset. 
- /// - a faucet tries to distribute an asset with a different faucet ID. + /// - a faucet tries to mint an asset with a different faucet ID. pub(crate) fn send_note_body( &self, sender_account_id: AccountId, @@ -186,16 +199,16 @@ impl AccountComponentInterface { let asset = partial_note.assets().iter().next().expect("note should contain an asset"); - if asset.faucet_id_prefix() != sender_account_id.prefix() { + if asset.faucet_id() != sender_account_id { return Err(AccountInterfaceError::IssuanceFaucetMismatch( - asset.faucet_id_prefix(), + asset.faucet_id(), )); } body.push_str(&format!( " push.{amount} - call.::miden::standards::faucets::basic_fungible::distribute + call.::miden::standards::faucets::basic_fungible::mint_and_send # => [note_idx, pad(25)] swapdw dropw dropw swap drop # => [note_idx, pad(16)]\n @@ -214,13 +227,22 @@ impl AccountComponentInterface { for asset in partial_note.assets().iter() { body.push_str(&format!( " - push.{asset} - # => [ASSET, note_idx, pad(16)] + # duplicate note index + padw push.0 push.0 push.0 dup.7 + # => [note_idx, pad(7), note_idx, pad(16)] + + push.{ASSET_VALUE} + push.{ASSET_KEY} + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7), note_idx, pad(16)] + call.::miden::standards::wallets::basic::move_asset_to_note - dropw + # => [pad(16), note_idx, pad(16)] + + dropw dropw dropw dropw # => [note_idx, pad(16)]\n ", - asset = Word::from(*asset) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), )); } }, @@ -270,7 +292,7 @@ fn extract_singlesig_auth_method( let scheme_id = storage .get_item(scheme_id_slot) .expect("invalid storage index of the scheme id")[0] - .as_int() as u8; + .as_canonical_u64() as u8; let auth_scheme = AuthScheme::try_from(scheme_id).expect("invalid auth scheme id in the scheme id slot"); @@ -291,8 +313,8 @@ fn extract_multisig_auth_method( .get_item(config_slot) .expect("invalid slot name of the multisig configuration"); - let threshold = config[0].as_int() as u32; - let num_approvers 
= config[1].as_int() as u8; + let threshold = config[0].as_canonical_u64() as u32; + let num_approvers = config[1].as_canonical_u64() as u8; let mut approvers = Vec::new(); @@ -322,7 +344,7 @@ fn extract_multisig_auth_method( ) }); - let scheme_id = scheme_word[0].as_int() as u8; + let scheme_id = scheme_word[0].as_canonical_u64() as u8; let auth_scheme = AuthScheme::try_from(scheme_id).expect("invalid auth scheme id in the scheme id slot"); approvers.push((pub_key, auth_scheme)); diff --git a/crates/miden-standards/src/account/interface/extension.rs b/crates/miden-standards/src/account/interface/extension.rs index dc0802072b..f23b1414a7 100644 --- a/crates/miden-standards/src/account/interface/extension.rs +++ b/crates/miden-standards/src/account/interface/extension.rs @@ -2,7 +2,7 @@ use alloc::collections::BTreeSet; use alloc::sync::Arc; use alloc::vec::Vec; -use miden_processor::MastNodeExt; +use miden_processor::mast::MastNodeExt; use miden_protocol::Word; use miden_protocol::account::{Account, AccountCode, AccountId, AccountProcedureRoot}; use miden_protocol::assembly::mast::{MastForest, MastNode, MastNodeId}; @@ -14,6 +14,7 @@ use crate::account::components::{ basic_fungible_faucet_library, basic_wallet_library, multisig_library, + multisig_psm_library, network_fungible_faucet_library, no_auth_library, singlesig_acl_library, @@ -112,6 +113,10 @@ impl AccountInterfaceExt for AccountInterface { component_proc_digests .extend(multisig_library().mast_forest().procedure_digests()); }, + AccountComponentInterface::AuthMultisigPsm => { + component_proc_digests + .extend(multisig_psm_library().mast_forest().procedure_digests()); + }, AccountComponentInterface::AuthNoAuth => { component_proc_digests .extend(no_auth_library().mast_forest().procedure_digests()); diff --git a/crates/miden-standards/src/account/interface/mod.rs b/crates/miden-standards/src/account/interface/mod.rs index 3b9746ae2d..a5409f34a3 100644 --- 
a/crates/miden-standards/src/account/interface/mod.rs +++ b/crates/miden-standards/src/account/interface/mod.rs @@ -1,7 +1,7 @@ use alloc::string::String; use alloc::vec::Vec; -use miden_protocol::account::{AccountId, AccountIdPrefix, AccountType}; +use miden_protocol::account::{AccountId, AccountType}; use miden_protocol::note::{NoteAttachmentContent, PartialNote}; use miden_protocol::transaction::TransactionScript; use thiserror::Error; @@ -134,7 +134,7 @@ impl AccountInterface { /// push.{note information} /// /// push.{asset amount} - /// call.::miden::standards::faucets::basic_fungible::distribute dropw dropw drop + /// call.::miden::standards::faucets::basic_fungible::mint_and_send dropw dropw drop /// end /// ``` /// @@ -144,7 +144,7 @@ impl AccountInterface { /// procedure. /// - the sender of the note isn't the account for which the script is being built. /// - the note created by the faucet doesn't contain exactly one asset. - /// - a faucet tries to distribute an asset with a different faucet ID. + /// - a faucet tries to mint an asset with a different faucet ID. /// /// [wallet]: crate::account::interface::AccountComponentInterface::BasicWallet /// [faucet]: crate::account::interface::AccountComponentInterface::BasicFungibleFaucet @@ -189,7 +189,7 @@ impl AccountInterface { /// procedure. /// - the sender of the note isn't the account for which the script is being built. /// - the note created by the faucet doesn't contain exactly one asset. - /// - a faucet tries to distribute an asset with a different faucet ID. + /// - a faucet tries to mint an asset with a different faucet ID. fn build_create_notes_section( &self, output_notes: &[PartialNote], @@ -249,8 +249,8 @@ pub enum NoteAccountCompatibility { /// Account interface related errors. 
#[derive(Debug, Error)] pub enum AccountInterfaceError { - #[error("note asset is not issued by this faucet: {0}")] - IssuanceFaucetMismatch(AccountIdPrefix), + #[error("note asset is not issued by faucet {0}")] + IssuanceFaucetMismatch(AccountId), #[error("note created by the basic fungible faucet doesn't contain exactly one asset")] FaucetNoteWithoutAsset, #[error("invalid transaction script")] diff --git a/crates/miden-standards/src/account/interface/test.rs b/crates/miden-standards/src/account/interface/test.rs index 127d90d5f8..5aef3f6203 100644 --- a/crates/miden-standards/src/account/interface/test.rs +++ b/crates/miden-standards/src/account/interface/test.rs @@ -3,7 +3,7 @@ use miden_protocol::account::auth::{self, PublicKeyCommitment}; use miden_protocol::account::component::AccountComponentMetadata; use miden_protocol::account::{AccountBuilder, AccountComponent, AccountId, AccountType}; use miden_protocol::asset::{FungibleAsset, NonFungibleAsset, TokenSymbol}; -use miden_protocol::crypto::rand::{FeltRng, RpoRandomCoin}; +use miden_protocol::crypto::rand::{FeltRng, RandomCoin}; use miden_protocol::errors::NoteError; use miden_protocol::note::{ Note, @@ -72,7 +72,7 @@ fn test_basic_wallet_default_notes() { vec![FungibleAsset::mock(10)], NoteType::Public, Default::default(), - &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), + &mut RandomCoin::new(Word::from([1, 2, 3, 4u32])), ) .unwrap(); @@ -86,7 +86,7 @@ fn test_basic_wallet_default_notes() { vec![FungibleAsset::mock(10)], NoteType::Public, Default::default(), - &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), + &mut RandomCoin::new(Word::from([1, 2, 3, 4u32])), ) .unwrap(); @@ -101,7 +101,7 @@ fn test_basic_wallet_default_notes() { NoteAttachment::default(), NoteType::Public, NoteAttachment::default(), - &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), + &mut RandomCoin::new(Word::from([1, 2, 3, 4u32])), ) .unwrap(); @@ -150,7 +150,7 @@ fn test_custom_account_default_note() { let 
account_code = CodeBuilder::default() .compile_component_code("test::account_custom", account_custom_code_source) .unwrap(); - let metadata = AccountComponentMetadata::new("test::account_custom").with_supports_all_types(); + let metadata = AccountComponentMetadata::new("test::account_custom", AccountType::all()); let account_component = AccountComponent::new(account_code, vec![], metadata).unwrap(); let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); @@ -167,7 +167,7 @@ fn test_custom_account_default_note() { vec![FungibleAsset::mock(10)], NoteType::Public, Default::default(), - &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), + &mut RandomCoin::new(Word::from([1, 2, 3, 4u32])), ) .unwrap(); @@ -181,7 +181,7 @@ fn test_custom_account_default_note() { vec![FungibleAsset::mock(10)], NoteType::Public, Default::default(), - &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), + &mut RandomCoin::new(Word::from([1, 2, 3, 4u32])), ) .unwrap(); @@ -196,7 +196,7 @@ fn test_custom_account_default_note() { NoteAttachment::default(), NoteType::Public, NoteAttachment::default(), - &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), + &mut RandomCoin::new(Word::from([1, 2, 3, 4u32])), ) .unwrap(); @@ -229,7 +229,7 @@ fn test_required_asset_same_as_offered() { NoteAttachment::default(), NoteType::Public, NoteAttachment::default(), - &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), + &mut RandomCoin::new(Word::from([1, 2, 3, 4u32])), ); assert_matches!(result, Err(NoteError::Other { error_msg, .. 
}) if error_msg == "requested asset same as offered asset".into()); @@ -250,7 +250,7 @@ fn test_basic_wallet_custom_notes() { let wallet_account_interface = AccountInterface::from_account(&wallet_account); let sender_account_id = ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2.try_into().unwrap(); - let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); + let serial_num = RandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); let tag = NoteTag::with_account_target(wallet_account.id()); let metadata = NoteMetadata::new(sender_account_id, NoteType::Public).with_tag(tag); let vault = NoteAssets::new(vec![FungibleAsset::mock(100)]).unwrap(); @@ -268,7 +268,7 @@ fn test_basic_wallet_custom_notes() { call.wallet::move_asset_to_note # unsupported procs - call.fungible_faucet::distribute + call.fungible_faucet::mint_and_send call.fungible_faucet::burn else # supported procs @@ -293,11 +293,11 @@ fn test_basic_wallet_custom_notes() { push.1 if.true # unsupported procs - call.fungible_faucet::distribute + call.fungible_faucet::mint_and_send call.fungible_faucet::burn else # unsupported proc - call.fungible_faucet::distribute + call.fungible_faucet::mint_and_send # supported procs call.wallet::receive_asset @@ -333,7 +333,7 @@ fn test_basic_fungible_faucet_custom_notes() { let faucet_account_interface = AccountInterface::from_account(&faucet_account); let sender_account_id = ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2.try_into().unwrap(); - let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); + let serial_num = RandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); let tag = NoteTag::with_account_target(faucet_account.id()); let metadata = NoteMetadata::new(sender_account_id, NoteType::Public).with_tag(tag); let vault = NoteAssets::new(vec![FungibleAsset::mock(100)]).unwrap(); @@ -346,11 +346,11 @@ fn test_basic_fungible_faucet_custom_notes() { push.1 if.true # supported procs - call.fungible_faucet::distribute + 
call.fungible_faucet::mint_and_send call.fungible_faucet::burn else # supported proc - call.fungible_faucet::distribute + call.fungible_faucet::mint_and_send # unsupported procs call.wallet::receive_asset @@ -374,7 +374,7 @@ fn test_basic_fungible_faucet_custom_notes() { push.1 if.true # supported procs - call.fungible_faucet::distribute + call.fungible_faucet::mint_and_send call.fungible_faucet::burn # unsupported proc @@ -417,8 +417,7 @@ fn test_custom_account_custom_notes() { let account_code = CodeBuilder::default() .compile_component_code("test::account::component_1", account_custom_code_source) .unwrap(); - let metadata = - AccountComponentMetadata::new("test::account::component_1").with_supports_all_types(); + let metadata = AccountComponentMetadata::new("test::account::component_1", AccountType::all()); let account_component = AccountComponent::new(account_code, vec![], metadata).unwrap(); let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); @@ -437,7 +436,7 @@ fn test_custom_account_custom_notes() { .build_existing() .expect("failed to create wallet account"); - let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); + let serial_num = RandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); let tag = NoteTag::with_account_target(target_account.id()); let metadata = NoteMetadata::new(sender_account.id(), NoteType::Public).with_tag(tag); let vault = NoteAssets::new(vec![FungibleAsset::mock(100)]).unwrap(); @@ -521,8 +520,7 @@ fn test_custom_account_multiple_components_custom_notes() { let custom_code = CodeBuilder::default() .compile_component_code("test::account::component_1", account_custom_code_source) .unwrap(); - let metadata = - AccountComponentMetadata::new("test::account::component_1").with_supports_all_types(); + let metadata = AccountComponentMetadata::new("test::account::component_1", AccountType::all()); let custom_component = AccountComponent::new(custom_code, vec![], metadata).unwrap(); let mock_seed = Word::from([0, 
1, 2, 3u32]).as_bytes(); @@ -542,7 +540,7 @@ fn test_custom_account_multiple_components_custom_notes() { .build_existing() .expect("failed to create wallet account"); - let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); + let serial_num = RandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); let tag = NoteTag::with_account_target(target_account.id()); let metadata = NoteMetadata::new(sender_account.id(), NoteType::Public).with_tag(tag); let vault = NoteAssets::new(vec![FungibleAsset::mock(100)]).unwrap(); @@ -568,7 +566,7 @@ fn test_custom_account_multiple_components_custom_notes() { call.test_account::procedure_2 # unsupported proc - call.fungible_faucet::distribute + call.fungible_faucet::mint_and_send end end "; @@ -599,7 +597,7 @@ fn test_custom_account_multiple_components_custom_notes() { call.test_account::procedure_2 # unsupported proc - call.fungible_faucet::distribute + call.fungible_faucet::mint_and_send else # supported procs call.test_account::procedure_1 @@ -630,7 +628,7 @@ fn test_custom_account_multiple_components_custom_notes() { fn get_mock_falcon_auth_component() -> AuthSingleSig { let mock_word = Word::from([0, 1, 2, 3u32]); let mock_public_key = PublicKeyCommitment::from(mock_word); - AuthSingleSig::new(mock_public_key, auth::AuthScheme::Falcon512Rpo) + AuthSingleSig::new(mock_public_key, auth::AuthScheme::Falcon512Poseidon2) } /// Helper function to create a mock Ecdsa auth component for testing @@ -675,7 +673,7 @@ fn test_get_auth_scheme_ecdsa_k256_keccak() { } #[test] -fn test_get_auth_scheme_falcon512_rpo() { +fn test_get_auth_scheme_falcon512_poseidon2() { let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let wallet_account = AccountBuilder::new(mock_seed) .with_auth_component(get_mock_falcon_auth_component()) @@ -685,12 +683,12 @@ fn test_get_auth_scheme_falcon512_rpo() { let wallet_account_interface = AccountInterface::from_account(&wallet_account); - // Find the Falcon512Rpo component interface + // 
Find the single sig component interface let rpo_falcon_component = wallet_account_interface .components() .iter() .find(|component| matches!(component, AccountComponentInterface::AuthSingleSig)) - .expect("should have Falcon512Rpo component"); + .expect("should have single sig component"); // Test get_auth_methods method let auth_methods = rpo_falcon_component.get_auth_methods(wallet_account.storage()); @@ -699,9 +697,9 @@ fn test_get_auth_scheme_falcon512_rpo() { match auth_method { AuthMethod::SingleSig { approver: (pub_key, auth_scheme) } => { assert_eq!(*pub_key, PublicKeyCommitment::from(Word::from([0, 1, 2, 3u32]))); - assert_eq!(*auth_scheme, auth::AuthScheme::Falcon512Rpo); + assert_eq!(*auth_scheme, auth::AuthScheme::Falcon512Poseidon2); }, - _ => panic!("Expected Falcon512Rpo auth scheme"), + _ => panic!("Expected Falcon512Poseidon2 auth scheme"), } } @@ -767,7 +765,7 @@ fn test_account_interface_from_account_uses_get_auth_scheme() { AuthMethod::SingleSig { approver: (pub_key, auth_scheme) } => { let expected_pub_key = PublicKeyCommitment::from(Word::from([0, 1, 2, 3u32])); assert_eq!(*pub_key, expected_pub_key); - assert_eq!(*auth_scheme, auth::AuthScheme::Falcon512Rpo); + assert_eq!(*auth_scheme, auth::AuthScheme::Falcon512Poseidon2); }, _ => panic!("Expected SingleSig auth method"), } @@ -790,7 +788,7 @@ fn test_account_interface_from_account_uses_get_auth_scheme() { } } -/// Test AccountInterface.get_auth_scheme() method with Falcon512Rpo and NoAuth +/// Test AccountInterface.get_auth_scheme() method with Falcon512Poseidon2 and NoAuth #[test] fn test_account_interface_get_auth_scheme() { let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); @@ -807,7 +805,7 @@ fn test_account_interface_get_auth_scheme() { match &wallet_account_interface.auth()[0] { AuthMethod::SingleSig { approver: (pub_key, auth_scheme) } => { assert_eq!(*pub_key, PublicKeyCommitment::from(Word::from([0, 1, 2, 3u32]))); - assert_eq!(*auth_scheme, auth::AuthScheme::Falcon512Rpo); + 
assert_eq!(*auth_scheme, auth::AuthScheme::Falcon512Poseidon2); }, _ => panic!("Expected SingleSig auth method"), } @@ -856,8 +854,8 @@ fn test_public_key_extraction_multisig_account() { let pub_key_3 = PublicKeyCommitment::from(Word::from([3u32, 0, 0, 0])); let approvers = vec![ - (pub_key_1, auth::AuthScheme::Falcon512Rpo), - (pub_key_2, auth::AuthScheme::Falcon512Rpo), + (pub_key_1, auth::AuthScheme::Falcon512Poseidon2), + (pub_key_2, auth::AuthScheme::Falcon512Poseidon2), (pub_key_3, auth::AuthScheme::EcdsaK256Keccak), ]; diff --git a/crates/miden-standards/src/account/metadata/mod.rs b/crates/miden-standards/src/account/metadata/mod.rs index 9489bc034e..00560c232b 100644 --- a/crates/miden-standards/src/account/metadata/mod.rs +++ b/crates/miden-standards/src/account/metadata/mod.rs @@ -1,253 +1,3 @@ -use alloc::collections::BTreeMap; +mod schema_commitment; -use miden_protocol::Word; -use miden_protocol::account::component::{AccountComponentMetadata, StorageSchema}; -use miden_protocol::account::{ - Account, - AccountBuilder, - AccountComponent, - StorageSlot, - StorageSlotName, -}; -use miden_protocol::errors::{AccountError, ComponentMetadataError}; -use miden_protocol::utils::sync::LazyLock; - -use crate::account::components::storage_schema_library; - -pub static SCHEMA_COMMITMENT_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::metadata::storage_schema") - .expect("storage slot name should be valid") -}); - -/// An [`AccountComponent`] exposing the account storage schema commitment. -/// -/// The [`AccountSchemaCommitment`] component can be constructed from a list of [`StorageSchema`], -/// from which a commitment is computed and then inserted into the [`SCHEMA_COMMITMENT_SLOT_NAME`] -/// slot. -/// -/// It reexports the `get_schema_commitment` procedure from -/// `miden::standards::metadata::storage_schema`. -/// -/// ## Storage Layout -/// -/// - [`Self::schema_commitment_slot`]: Storage schema commitment. 
-pub struct AccountSchemaCommitment { - schema_commitment: Word, -} - -impl AccountSchemaCommitment { - /// Creates a new [`AccountSchemaCommitment`] component from storage schemas. - /// - /// The input schemas are merged into a single schema before the final commitment is computed. - /// - /// # Errors - /// - /// Returns an error if the schemas contain conflicting definitions for the same slot name. - pub fn new<'a>( - schemas: impl IntoIterator, - ) -> Result { - Ok(Self { - schema_commitment: compute_schema_commitment(schemas)?, - }) - } - - /// Creates a new [`AccountSchemaCommitment`] component from a [`StorageSchema`]. - pub fn from_schema(storage_schema: &StorageSchema) -> Result { - Self::new(core::slice::from_ref(storage_schema)) - } - - /// Returns the [`StorageSlotName`] where the schema commitment is stored. - pub fn schema_commitment_slot() -> &'static StorageSlotName { - &SCHEMA_COMMITMENT_SLOT_NAME - } -} - -impl From for AccountComponent { - fn from(schema_commitment: AccountSchemaCommitment) -> Self { - let metadata = AccountComponentMetadata::new("miden::metadata::schema_commitment") - .with_description("Component exposing the account storage schema commitment") - .with_supports_all_types(); - - AccountComponent::new( - storage_schema_library(), - vec![StorageSlot::with_value( - AccountSchemaCommitment::schema_commitment_slot().clone(), - schema_commitment.schema_commitment, - )], - metadata, - ) - .expect( - "AccountSchemaCommitment component should satisfy the requirements of a valid account component", - ) - } -} - -// ACCOUNT BUILDER EXTENSION -// ================================================================================================ - -/// An extension trait for [`AccountBuilder`] that provides a convenience method for building an -/// account with an [`AccountSchemaCommitment`] component. 
-pub trait AccountBuilderSchemaCommitmentExt { - /// Builds an [`Account`] out of the configured builder after computing the storage schema - /// commitment from all components currently in the builder and adding an - /// [`AccountSchemaCommitment`] component. - /// - /// # Errors - /// - /// Returns an error if: - /// - The components' storage schemas contain conflicting definitions for the same slot name. - /// - [`AccountBuilder::build`] fails. - fn build_with_schema_commitment(self) -> Result; -} - -impl AccountBuilderSchemaCommitmentExt for AccountBuilder { - fn build_with_schema_commitment(self) -> Result { - let schema_commitment = - AccountSchemaCommitment::new(self.storage_schemas()).map_err(|err| { - AccountError::other_with_source("failed to compute account schema commitment", err) - })?; - - self.with_component(schema_commitment).build() - } -} - -// HELPERS -// ================================================================================================ - -/// Computes the schema commitment. -/// -/// The account schema commitment is computed from the merged schema commitment. -/// If the passed list of schemas is empty, [`Word::empty()`] is returned. 
-fn compute_schema_commitment<'a>( - schemas: impl IntoIterator, -) -> Result { - let mut schemas = schemas.into_iter().peekable(); - if schemas.peek().is_none() { - return Ok(Word::empty()); - } - - let mut merged_slots = BTreeMap::new(); - - for schema in schemas { - for (slot_name, slot_schema) in schema.iter() { - match merged_slots.get(slot_name) { - None => { - merged_slots.insert(slot_name.clone(), slot_schema.clone()); - }, - // Slot exists, check if the schema is the same before erroring - Some(existing) => { - if existing != slot_schema { - return Err(ComponentMetadataError::InvalidSchema(format!( - "conflicting definitions for storage slot `{slot_name}`", - ))); - } - }, - } - } - } - - let merged_schema = StorageSchema::new(merged_slots)?; - - Ok(merged_schema.commitment()) -} - -// TESTS -// ================================================================================================ - -#[cfg(test)] -mod tests { - use miden_protocol::Word; - use miden_protocol::account::AccountBuilder; - use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; - use miden_protocol::account::component::AccountComponentMetadata; - - use super::{AccountBuilderSchemaCommitmentExt, AccountSchemaCommitment}; - use crate::account::auth::{AuthSingleSig, NoAuth}; - - #[test] - fn storage_schema_commitment_is_order_independent() { - let toml_a = r#" - name = "Component A" - description = "Component A schema" - version = "0.1.0" - supported-types = [] - - [[storage.slots]] - name = "test::slot_a" - type = "word" - "#; - - let toml_b = r#" - name = "Component B" - description = "Component B schema" - version = "0.1.0" - supported-types = [] - - [[storage.slots]] - name = "test::slot_b" - description = "description is committed to" - type = "word" - "#; - - let metadata_a = AccountComponentMetadata::from_toml(toml_a).unwrap(); - let metadata_b = AccountComponentMetadata::from_toml(toml_b).unwrap(); - - let schema_a = metadata_a.storage_schema().clone(); - let 
schema_b = metadata_b.storage_schema().clone(); - - // Create one component for each of two different accounts, but switch orderings - let component_a = - AccountSchemaCommitment::new(&[schema_a.clone(), schema_b.clone()]).unwrap(); - let component_b = AccountSchemaCommitment::new(&[schema_b, schema_a]).unwrap(); - - let account_a = AccountBuilder::new([1u8; 32]) - .with_auth_component(NoAuth) - .with_component(component_a) - .build() - .unwrap(); - - let account_b = AccountBuilder::new([2u8; 32]) - .with_auth_component(NoAuth) - .with_component(component_b) - .build() - .unwrap(); - - let slot_name = AccountSchemaCommitment::schema_commitment_slot(); - let commitment_a = account_a.storage().get_item(slot_name).unwrap(); - let commitment_b = account_b.storage().get_item(slot_name).unwrap(); - - assert_eq!(commitment_a, commitment_b); - } - - #[test] - fn storage_schema_commitment_is_empty_for_no_schemas() { - let component = AccountSchemaCommitment::new(&[]).unwrap(); - - assert_eq!(component.schema_commitment, Word::empty()); - } - - #[test] - fn build_with_schema_commitment_adds_schema_commitment_component() { - let auth_component = AuthSingleSig::new( - PublicKeyCommitment::from(Word::empty()), - AuthScheme::EcdsaK256Keccak, - ); - - let account = AccountBuilder::new([1u8; 32]) - .with_auth_component(auth_component) - .build_with_schema_commitment() - .unwrap(); - - // The auth component has 2 slots (public key and scheme ID) and the schema commitment adds - // 1 more. - assert_eq!(account.storage().num_slots(), 3); - - // The auth component's public key slot should be accessible. - assert!(account.storage().get_item(AuthSingleSig::public_key_slot()).is_ok()); - - // The schema commitment slot should be non-empty since we have a component with a schema. 
- let slot_name = AccountSchemaCommitment::schema_commitment_slot(); - let commitment = account.storage().get_item(slot_name).unwrap(); - assert_ne!(commitment, Word::empty()); - } -} +pub use schema_commitment::{AccountBuilderSchemaCommitmentExt, AccountSchemaCommitment}; diff --git a/crates/miden-standards/src/account/metadata/schema_commitment.rs b/crates/miden-standards/src/account/metadata/schema_commitment.rs new file mode 100644 index 0000000000..b044609d22 --- /dev/null +++ b/crates/miden-standards/src/account/metadata/schema_commitment.rs @@ -0,0 +1,288 @@ +use alloc::collections::BTreeMap; + +use miden_protocol::Word; +use miden_protocol::account::component::{ + AccountComponentMetadata, + SchemaType, + StorageSchema, + StorageSlotSchema, + WordSchema, +}; +use miden_protocol::account::{ + Account, + AccountBuilder, + AccountComponent, + AccountType, + StorageSlot, + StorageSlotName, +}; +use miden_protocol::assembly::Library; +use miden_protocol::errors::{AccountError, ComponentMetadataError}; +use miden_protocol::utils::serde::Deserializable; +use miden_protocol::utils::sync::LazyLock; + +// CONSTANTS +// ================================================================================================ + +// Initialize the Storage Schema library only once. +static STORAGE_SCHEMA_LIBRARY: LazyLock = LazyLock::new(|| { + let bytes = include_bytes!(concat!( + env!("OUT_DIR"), + "/assets/account_components/metadata/schema_commitment.masl" + )); + Library::read_from_bytes(bytes).expect("Shipped Storage Schema library is well-formed") +}); + +static SCHEMA_COMMITMENT_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::metadata::storage_schema::commitment") + .expect("storage slot name should be valid") +}); + +// ACCOUNT SCHEMA COMMITMENT +// ================================================================================================ + +/// An [`AccountComponent`] exposing the account storage schema commitment. 
+/// +/// The [`AccountSchemaCommitment`] component can be constructed from a list of [`StorageSchema`], +/// from which a commitment is computed and then inserted into the +/// `miden::standards::metadata::storage_schema::commitment` slot. +/// +/// It reexports the `get_schema_commitment` procedure from +/// `miden::standards::metadata::storage_schema`. +/// +/// ## Storage Layout +/// +/// - [`Self::schema_commitment_slot`]: Storage schema commitment. +pub struct AccountSchemaCommitment { + schema_commitment: Word, +} + +impl AccountSchemaCommitment { + /// Name of the component is set to match the path of the corresponding module in the standards + /// library. + const NAME: &str = "miden::standards::metadata::storage_schema"; + + /// Creates a new [`AccountSchemaCommitment`] component from storage schemas. + /// + /// The input schemas are merged into a single schema before the final commitment is computed. + /// + /// # Errors + /// + /// Returns an error if the schemas contain conflicting definitions for the same slot name. + pub fn new<'a>( + schemas: impl IntoIterator, + ) -> Result { + Ok(Self { + schema_commitment: compute_schema_commitment(schemas)?, + }) + } + + /// Creates a new [`AccountSchemaCommitment`] component from a [`StorageSchema`]. + pub fn from_schema(storage_schema: &StorageSchema) -> Result { + Self::new(core::slice::from_ref(storage_schema)) + } + + /// Returns the [`StorageSlotName`] where the schema commitment is stored. + pub fn schema_commitment_slot() -> &'static StorageSlotName { + &SCHEMA_COMMITMENT_SLOT_NAME + } + + /// Returns the [`AccountComponentMetadata`] for this component. 
+ pub fn component_metadata() -> AccountComponentMetadata { + let storage_schema = StorageSchema::new([( + Self::schema_commitment_slot().clone(), + StorageSlotSchema::value( + "Commitment to the storage schema of an account", + WordSchema::new_simple(SchemaType::native_word()), + ), + )]) + .expect("storage schema should be valid"); + + AccountComponentMetadata::new(Self::NAME, AccountType::all()) + .with_description("Component exposing the account storage schema commitment") + .with_storage_schema(storage_schema) + } +} + +impl From for AccountComponent { + fn from(schema_commitment: AccountSchemaCommitment) -> Self { + let metadata = AccountSchemaCommitment::component_metadata(); + let storage = vec![StorageSlot::with_value( + AccountSchemaCommitment::schema_commitment_slot().clone(), + schema_commitment.schema_commitment, + )]; + + AccountComponent::new(STORAGE_SCHEMA_LIBRARY.clone(), storage, metadata) + .expect("AccountSchemaCommitment is a valid account component") + } +} + +// ACCOUNT BUILDER EXTENSION +// ================================================================================================ + +/// An extension trait for [`AccountBuilder`] that provides a convenience method for building an +/// account with an [`AccountSchemaCommitment`] component. +pub trait AccountBuilderSchemaCommitmentExt { + /// Builds an [`Account`] out of the configured builder after computing the storage schema + /// commitment from all components currently in the builder and adding an + /// [`AccountSchemaCommitment`] component. + /// + /// # Errors + /// + /// Returns an error if: + /// - The components' storage schemas contain conflicting definitions for the same slot name. + /// - [`AccountBuilder::build`] fails. 
+ fn build_with_schema_commitment(self) -> Result; +} + +impl AccountBuilderSchemaCommitmentExt for AccountBuilder { + fn build_with_schema_commitment(self) -> Result { + let schema_commitment = + AccountSchemaCommitment::new(self.storage_schemas()).map_err(|err| { + AccountError::other_with_source("failed to compute account schema commitment", err) + })?; + + self.with_component(schema_commitment).build() + } +} + +// HELPERS +// ================================================================================================ + +/// Computes the schema commitment. +/// +/// The account schema commitment is computed from the merged schema commitment. If the passed +/// list of schemas is empty, [`Word::empty()`] is returned. +fn compute_schema_commitment<'a>( + schemas: impl IntoIterator, +) -> Result { + let mut schemas = schemas.into_iter().peekable(); + if schemas.peek().is_none() { + return Ok(Word::empty()); + } + + let mut merged_slots = BTreeMap::new(); + + for schema in schemas { + for (slot_name, slot_schema) in schema.iter() { + match merged_slots.get(slot_name) { + None => { + merged_slots.insert(slot_name.clone(), slot_schema.clone()); + }, + // Slot exists, check if the schema is the same before erroring + Some(existing) => { + if existing != slot_schema { + return Err(ComponentMetadataError::InvalidSchema(format!( + "conflicting definitions for storage slot `{slot_name}`", + ))); + } + }, + } + } + } + + let merged_schema = StorageSchema::new(merged_slots)?; + + Ok(merged_schema.commitment()) +} + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use miden_protocol::Word; + use miden_protocol::account::AccountBuilder; + use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; + use miden_protocol::account::component::AccountComponentMetadata; + + use super::{AccountBuilderSchemaCommitmentExt, AccountSchemaCommitment}; + use 
crate::account::auth::{AuthSingleSig, NoAuth}; + + #[test] + fn storage_schema_commitment_is_order_independent() { + let toml_a = r#" + name = "Component A" + description = "Component A schema" + version = "0.1.0" + supported-types = [] + + [[storage.slots]] + name = "test::slot_a" + type = "word" + "#; + + let toml_b = r#" + name = "Component B" + description = "Component B schema" + version = "0.1.0" + supported-types = [] + + [[storage.slots]] + name = "test::slot_b" + description = "description is committed to" + type = "word" + "#; + + let metadata_a = AccountComponentMetadata::from_toml(toml_a).unwrap(); + let metadata_b = AccountComponentMetadata::from_toml(toml_b).unwrap(); + + let schema_a = metadata_a.storage_schema().clone(); + let schema_b = metadata_b.storage_schema().clone(); + + // Create one component for each of two different accounts, but switch orderings + let component_a = + AccountSchemaCommitment::new(&[schema_a.clone(), schema_b.clone()]).unwrap(); + let component_b = AccountSchemaCommitment::new(&[schema_b, schema_a]).unwrap(); + + let account_a = AccountBuilder::new([1u8; 32]) + .with_auth_component(NoAuth) + .with_component(component_a) + .build() + .unwrap(); + + let account_b = AccountBuilder::new([2u8; 32]) + .with_auth_component(NoAuth) + .with_component(component_b) + .build() + .unwrap(); + + let slot_name = AccountSchemaCommitment::schema_commitment_slot(); + let commitment_a = account_a.storage().get_item(slot_name).unwrap(); + let commitment_b = account_b.storage().get_item(slot_name).unwrap(); + + assert_eq!(commitment_a, commitment_b); + } + + #[test] + fn storage_schema_commitment_is_empty_for_no_schemas() { + let component = AccountSchemaCommitment::new(&[]).unwrap(); + + assert_eq!(component.schema_commitment, Word::empty()); + } + + #[test] + fn build_with_schema_commitment_adds_schema_commitment_component() { + let auth_component = AuthSingleSig::new( + PublicKeyCommitment::from(Word::empty()), + 
AuthScheme::EcdsaK256Keccak, + ); + + let account = AccountBuilder::new([1u8; 32]) + .with_auth_component(auth_component) + .build_with_schema_commitment() + .unwrap(); + + // The auth component has 2 slots (public key and scheme ID) and the schema commitment adds + // 1 more. + assert_eq!(account.storage().num_slots(), 3); + + // The auth component's public key slot should be accessible. + assert!(account.storage().get_item(AuthSingleSig::public_key_slot()).is_ok()); + + // The schema commitment slot should be non-empty since we have a component with a schema. + let slot_name = AccountSchemaCommitment::schema_commitment_slot(); + let commitment = account.storage().get_item(slot_name).unwrap(); + assert_ne!(commitment, Word::empty()); + } +} diff --git a/crates/miden-standards/src/account/mint_policies/auth_controlled.rs b/crates/miden-standards/src/account/mint_policies/auth_controlled.rs new file mode 100644 index 0000000000..116810765e --- /dev/null +++ b/crates/miden-standards/src/account/mint_policies/auth_controlled.rs @@ -0,0 +1,224 @@ +use miden_protocol::Word; +use miden_protocol::account::component::{ + AccountComponentMetadata, + FeltSchema, + SchemaType, + StorageSchema, + StorageSlotSchema, +}; +use miden_protocol::account::{ + AccountComponent, + AccountType, + StorageMap, + StorageMapKey, + StorageSlot, + StorageSlotName, +}; +use miden_protocol::utils::sync::LazyLock; + +use super::MintPolicyAuthority; +use crate::account::components::auth_controlled_library; +use crate::procedure_digest; + +// CONSTANTS +// ================================================================================================ + +procedure_digest!( + ALLOW_ALL_POLICY_ROOT, + AuthControlled::NAME, + AuthControlled::ALLOW_ALL_PROC_NAME, + auth_controlled_library +); + +static ACTIVE_MINT_POLICY_PROC_ROOT_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::mint_policy_manager::active_policy_proc_root") + .expect("storage slot name should be 
valid") +}); +static ALLOWED_MINT_POLICY_PROC_ROOTS_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::mint_policy_manager::allowed_policy_proc_roots") + .expect("storage slot name should be valid") +}); +/// An [`AccountComponent`] providing configurable mint-policy management for network faucets. +/// +/// It reexports policy procedures from `miden::standards::mint_policies` and manager procedures +/// from `miden::standards::mint_policies::policy_manager`: +/// - `allow_all` +/// - `set_mint_policy` +/// - `get_mint_policy` +/// +/// ## Storage Layout +/// +/// - [`Self::active_policy_proc_root_slot`]: Procedure root of the active mint policy. +/// - [`Self::allowed_policy_proc_roots_slot`]: Set of allowed mint policy procedure roots. +/// - [`Self::policy_authority_slot`]: Policy authority mode +/// ([`MintPolicyAuthority::AuthControlled`] = tx auth, [`MintPolicyAuthority::OwnerControlled`] = +/// external owner). +#[derive(Debug, Clone, Copy)] +pub struct AuthControlled { + initial_policy_root: Word, +} + +/// Initial policy configuration for the [`AuthControlled`] component. +#[derive(Debug, Clone, Copy, Default)] +pub enum AuthControlledInitConfig { + /// Sets the initial policy to `allow_all`. + #[default] + AllowAll, + /// Sets a custom initial policy root. + CustomInitialRoot(Word), +} + +impl AuthControlled { + /// The name of the component. + pub const NAME: &'static str = "miden::standards::components::mint_policies::auth_controlled"; + + const ALLOW_ALL_PROC_NAME: &str = "allow_all"; + + /// Creates a new [`AuthControlled`] component from the provided configuration. 
+ pub fn new(policy: AuthControlledInitConfig) -> Self { + let initial_policy_root = match policy { + AuthControlledInitConfig::AllowAll => Self::allow_all_policy_root(), + AuthControlledInitConfig::CustomInitialRoot(root) => root, + }; + + Self { initial_policy_root } + } + + /// Creates a new [`AuthControlled`] component with `allow_all` policy as + /// default. + pub fn allow_all() -> Self { + Self::new(AuthControlledInitConfig::AllowAll) + } + + /// Returns the [`StorageSlotName`] where the active mint policy procedure root is stored. + pub fn active_policy_proc_root_slot() -> &'static StorageSlotName { + &ACTIVE_MINT_POLICY_PROC_ROOT_SLOT_NAME + } + + /// Returns the [`StorageSlotName`] where allowed policy roots are stored. + pub fn allowed_policy_proc_roots_slot() -> &'static StorageSlotName { + &ALLOWED_MINT_POLICY_PROC_ROOTS_SLOT_NAME + } + + /// Returns the storage slot schema for the active mint policy root. + pub fn active_policy_proc_root_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + ( + Self::active_policy_proc_root_slot().clone(), + StorageSlotSchema::value( + "The procedure root of the active mint policy in the mint policy auth controlled component", + [ + FeltSchema::felt("proc_root_0"), + FeltSchema::felt("proc_root_1"), + FeltSchema::felt("proc_root_2"), + FeltSchema::felt("proc_root_3"), + ], + ), + ) + } + + /// Returns the storage slot schema for the allowed policy roots map. + pub fn allowed_policy_proc_roots_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + ( + Self::allowed_policy_proc_roots_slot().clone(), + StorageSlotSchema::map( + "The set of allowed mint policy procedure roots in the mint policy auth controlled component", + SchemaType::native_word(), + SchemaType::native_word(), + ), + ) + } + + /// Returns the [`StorageSlotName`] containing policy authority mode. 
+ pub fn policy_authority_slot() -> &'static StorageSlotName { + MintPolicyAuthority::slot() + } + + /// Returns the storage slot schema for policy authority mode. + pub fn policy_authority_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + ( + Self::policy_authority_slot().clone(), + StorageSlotSchema::value( + "Policy authority mode (AuthControlled = tx auth, OwnerControlled = external owner)", + [ + FeltSchema::u8("policy_authority"), + FeltSchema::new_void(), + FeltSchema::new_void(), + FeltSchema::new_void(), + ], + ), + ) + } + + /// Returns the default `allow_all` policy root. + pub fn allow_all_policy_root() -> Word { + *ALLOW_ALL_POLICY_ROOT + } + + /// Returns the policy authority used by this component. + pub fn mint_policy_authority(&self) -> MintPolicyAuthority { + MintPolicyAuthority::AuthControlled + } +} + +impl Default for AuthControlled { + fn default() -> Self { + Self::allow_all() + } +} + +impl From for AccountComponent { + fn from(auth_controlled: AuthControlled) -> Self { + let active_policy_proc_root_slot = StorageSlot::with_value( + AuthControlled::active_policy_proc_root_slot().clone(), + auth_controlled.initial_policy_root, + ); + let allowed_policy_flag = Word::from([1u32, 0, 0, 0]); + let allow_all_policy_root = AuthControlled::allow_all_policy_root(); + + let mut allowed_policy_entries = + vec![(StorageMapKey::from_raw(allow_all_policy_root), allowed_policy_flag)]; + + if auth_controlled.initial_policy_root != allow_all_policy_root { + allowed_policy_entries.push(( + StorageMapKey::from_raw(auth_controlled.initial_policy_root), + allowed_policy_flag, + )); + } + + let allowed_policy_proc_roots = StorageMap::with_entries(allowed_policy_entries) + .expect("allowed mint policy roots should have unique keys"); + + let allowed_policy_proc_roots_slot = StorageSlot::with_map( + AuthControlled::allowed_policy_proc_roots_slot().clone(), + allowed_policy_proc_roots, + ); + let policy_authority_slot = 
StorageSlot::from(auth_controlled.mint_policy_authority()); + + let storage_schema = StorageSchema::new(vec![ + AuthControlled::active_policy_proc_root_slot_schema(), + AuthControlled::allowed_policy_proc_roots_slot_schema(), + AuthControlled::policy_authority_slot_schema(), + ]) + .expect("storage schema should be valid"); + + let metadata = + AccountComponentMetadata::new(AuthControlled::NAME, [AccountType::FungibleFaucet]) + .with_description( + "Mint policy auth controlled component for network fungible faucets", + ) + .with_storage_schema(storage_schema); + + AccountComponent::new( + auth_controlled_library(), + vec![ + active_policy_proc_root_slot, + allowed_policy_proc_roots_slot, + policy_authority_slot, + ], + metadata, + ) + .expect( + "mint policy auth controlled component should satisfy the requirements of a valid account component", + ) + } +} diff --git a/crates/miden-standards/src/account/mint_policies/mod.rs b/crates/miden-standards/src/account/mint_policies/mod.rs new file mode 100644 index 0000000000..91f990df62 --- /dev/null +++ b/crates/miden-standards/src/account/mint_policies/mod.rs @@ -0,0 +1,46 @@ +use miden_protocol::Word; +use miden_protocol::account::{StorageSlot, StorageSlotName}; +use miden_protocol::utils::sync::LazyLock; + +mod auth_controlled; +mod owner_controlled; + +pub use auth_controlled::{AuthControlled, AuthControlledInitConfig}; +pub use owner_controlled::{OwnerControlled, OwnerControlledInitConfig}; + +static POLICY_AUTHORITY_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::mint_policy_manager::policy_authority") + .expect("storage slot name should be valid") +}); + +/// Identifies which authority is allowed to manage the active mint policy for a faucet. +/// +/// This value is stored in the policy authority slot so the account can distinguish whether mint +/// policy updates are governed by authentication component logic or by the account owner. 
+#[repr(u8)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum MintPolicyAuthority {
+    /// Mint policy changes are authorized by the account's authentication component logic.
+    AuthControlled = 0,
+    /// Mint policy changes are authorized by the external account owner.
+    OwnerControlled = 1,
+}
+
+impl MintPolicyAuthority {
+    /// Returns the [`StorageSlotName`] containing the mint policy authority mode.
+    pub fn slot() -> &'static StorageSlotName {
+        &POLICY_AUTHORITY_SLOT_NAME
+    }
+}
+
+impl From<MintPolicyAuthority> for Word {
+    fn from(value: MintPolicyAuthority) -> Self {
+        Word::from([value as u32, 0, 0, 0])
+    }
+}
+
+impl From<MintPolicyAuthority> for StorageSlot {
+    fn from(value: MintPolicyAuthority) -> Self {
+        StorageSlot::with_value(MintPolicyAuthority::slot().clone(), value.into())
+    }
+}
diff --git a/crates/miden-standards/src/account/mint_policies/owner_controlled.rs b/crates/miden-standards/src/account/mint_policies/owner_controlled.rs
new file mode 100644
index 0000000000..4cc606f841
--- /dev/null
+++ b/crates/miden-standards/src/account/mint_policies/owner_controlled.rs
@@ -0,0 +1,225 @@
+use miden_protocol::Word;
+use miden_protocol::account::component::{
+    AccountComponentMetadata,
+    FeltSchema,
+    SchemaType,
+    StorageSchema,
+    StorageSlotSchema,
+};
+use miden_protocol::account::{
+    AccountComponent,
+    AccountType,
+    StorageMap,
+    StorageMapKey,
+    StorageSlot,
+    StorageSlotName,
+};
+use miden_protocol::utils::sync::LazyLock;
+
+use super::MintPolicyAuthority;
+use crate::account::components::owner_controlled_library;
+use crate::procedure_digest;
+
+// CONSTANTS
+// ================================================================================================
+
+procedure_digest!(
+    OWNER_ONLY_POLICY_ROOT,
+    OwnerControlled::NAME,
+    OwnerControlled::OWNER_ONLY_PROC_NAME,
+    owner_controlled_library
+);
+
+static ACTIVE_MINT_POLICY_PROC_ROOT_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| {
+    StorageSlotName::new("miden::standards::mint_policy_manager::active_policy_proc_root")
+        .expect("storage slot name should be valid")
+});
+static ALLOWED_MINT_POLICY_PROC_ROOTS_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| {
+    StorageSlotName::new("miden::standards::mint_policy_manager::allowed_policy_proc_roots")
+        .expect("storage slot name should be valid")
+});
+
+/// An [`AccountComponent`] providing configurable mint-policy management for network faucets.
+///
+/// It reexports policy procedures from `miden::standards::mint_policies` and manager procedures
+/// from `miden::standards::mint_policies::policy_manager`:
+/// - `owner_only`
+/// - `set_mint_policy`
+/// - `get_mint_policy`
+///
+/// ## Storage Layout
+///
+/// - [`Self::active_policy_proc_root_slot`]: Procedure root of the active mint policy.
+/// - [`Self::allowed_policy_proc_roots_slot`]: Set of allowed mint policy procedure roots.
+/// - [`Self::policy_authority_slot`]: Policy authority mode
+///   ([`MintPolicyAuthority::AuthControlled`] = tx auth, [`MintPolicyAuthority::OwnerControlled`] =
+///   external owner).
+#[derive(Debug, Clone, Copy)]
+pub struct OwnerControlled {
+    initial_policy_root: Word,
+}
+
+/// Initial policy configuration for the [`OwnerControlled`] component.
+#[derive(Debug, Clone, Copy, Default)]
+pub enum OwnerControlledInitConfig {
+    /// Sets the initial policy to `owner_only`.
+    #[default]
+    OwnerOnly,
+    /// Sets a custom initial policy root.
+    CustomInitialRoot(Word),
+}
+
+impl OwnerControlled {
+    /// The name of the component.
+    pub const NAME: &'static str = "miden::standards::components::mint_policies::owner_controlled";
+
+    const OWNER_ONLY_PROC_NAME: &str = "owner_only";
+
+    /// Creates a new [`OwnerControlled`] component from the provided configuration.
+    pub fn new(policy: OwnerControlledInitConfig) -> Self {
+        let initial_policy_root = match policy {
+            OwnerControlledInitConfig::OwnerOnly => Self::owner_only_policy_root(),
+            OwnerControlledInitConfig::CustomInitialRoot(root) => root,
+        };
+
+        Self { initial_policy_root }
+    }
+
+    /// Creates a new [`OwnerControlled`] component with owner-only policy as default.
+    pub fn owner_only() -> Self {
+        Self::new(OwnerControlledInitConfig::OwnerOnly)
+    }
+
+    /// Returns the [`StorageSlotName`] where the active mint policy procedure root is stored.
+    pub fn active_policy_proc_root_slot() -> &'static StorageSlotName {
+        &ACTIVE_MINT_POLICY_PROC_ROOT_SLOT_NAME
+    }
+
+    /// Returns the [`StorageSlotName`] where allowed policy roots are stored.
+    pub fn allowed_policy_proc_roots_slot() -> &'static StorageSlotName {
+        &ALLOWED_MINT_POLICY_PROC_ROOTS_SLOT_NAME
+    }
+
+    /// Returns the storage slot schema for the active mint policy root.
+    pub fn active_policy_proc_root_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        (
+            Self::active_policy_proc_root_slot().clone(),
+            StorageSlotSchema::value(
+                "The procedure root of the active mint policy in the mint policy owner controlled component",
+                [
+                    FeltSchema::felt("proc_root_0"),
+                    FeltSchema::felt("proc_root_1"),
+                    FeltSchema::felt("proc_root_2"),
+                    FeltSchema::felt("proc_root_3"),
+                ],
+            ),
+        )
+    }
+
+    /// Returns the storage slot schema for the allowed policy roots map.
+    pub fn allowed_policy_proc_roots_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        (
+            Self::allowed_policy_proc_roots_slot().clone(),
+            StorageSlotSchema::map(
+                "The set of allowed mint policy procedure roots in the mint policy owner controlled component",
+                SchemaType::native_word(),
+                SchemaType::native_word(),
+            ),
+        )
+    }
+
+    /// Returns the [`StorageSlotName`] containing policy authority mode.
+    pub fn policy_authority_slot() -> &'static StorageSlotName {
+        MintPolicyAuthority::slot()
+    }
+
+    /// Returns the storage slot schema for policy authority mode.
+    pub fn policy_authority_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        (
+            Self::policy_authority_slot().clone(),
+            StorageSlotSchema::value(
+                "Policy authority mode (AuthControlled = tx auth, OwnerControlled = external owner)",
+                [
+                    FeltSchema::u8("policy_authority"),
+                    FeltSchema::new_void(),
+                    FeltSchema::new_void(),
+                    FeltSchema::new_void(),
+                ],
+            ),
+        )
+    }
+
+    /// Returns the default owner-only policy root.
+    pub fn owner_only_policy_root() -> Word {
+        *OWNER_ONLY_POLICY_ROOT
+    }
+
+    /// Returns the policy authority used by this component.
+    pub fn mint_policy_authority(&self) -> MintPolicyAuthority {
+        MintPolicyAuthority::OwnerControlled
+    }
+
+    /// Returns the [`AccountComponentMetadata`] for this component.
+    pub fn component_metadata() -> AccountComponentMetadata {
+        let storage_schema = StorageSchema::new(vec![
+            OwnerControlled::active_policy_proc_root_slot_schema(),
+            OwnerControlled::allowed_policy_proc_roots_slot_schema(),
+            OwnerControlled::policy_authority_slot_schema(),
+        ])
+        .expect("storage schema should be valid");
+
+        AccountComponentMetadata::new(OwnerControlled::NAME, [AccountType::FungibleFaucet])
+            .with_description("Mint policy owner controlled component for network fungible faucets")
+            .with_storage_schema(storage_schema)
+    }
+}
+
+impl Default for OwnerControlled {
+    fn default() -> Self {
+        Self::owner_only()
+    }
+}
+
+impl From<OwnerControlled> for AccountComponent {
+    fn from(owner_controlled: OwnerControlled) -> Self {
+        let active_policy_proc_root_slot = StorageSlot::with_value(
+            OwnerControlled::active_policy_proc_root_slot().clone(),
+            owner_controlled.initial_policy_root,
+        );
+        let allowed_policy_flag = Word::from([1u32, 0, 0, 0]);
+        let owner_only_policy_root = OwnerControlled::owner_only_policy_root();
+
+        let mut allowed_policy_entries =
+            vec![(StorageMapKey::from_raw(owner_only_policy_root), allowed_policy_flag)];
+
+        if owner_controlled.initial_policy_root != owner_only_policy_root {
+            allowed_policy_entries.push((
+                StorageMapKey::from_raw(owner_controlled.initial_policy_root),
+                allowed_policy_flag,
+            ));
+        }
+
+        let allowed_policy_proc_roots = StorageMap::with_entries(allowed_policy_entries)
+            .expect("allowed mint policy roots should have unique keys");
+
+        let allowed_policy_proc_roots_slot = StorageSlot::with_map(
+            OwnerControlled::allowed_policy_proc_roots_slot().clone(),
+            allowed_policy_proc_roots,
+        );
+        let policy_authority_slot = StorageSlot::from(owner_controlled.mint_policy_authority());
+
+        let metadata = OwnerControlled::component_metadata();
+
+        AccountComponent::new(
+            owner_controlled_library(),
+            vec![
+                active_policy_proc_root_slot,
+                allowed_policy_proc_roots_slot,
+                policy_authority_slot,
+            ],
+            metadata,
+        )
+        .expect(
+            "mint policy owner controlled component should satisfy the requirements of a valid account component",
+        )
+    }
+}
diff --git a/crates/miden-standards/src/account/mod.rs b/crates/miden-standards/src/account/mod.rs
index 56e4dbe720..9580c185f8 100644
--- a/crates/miden-standards/src/account/mod.rs
+++ b/crates/miden-standards/src/account/mod.rs
@@ -1,10 +1,12 @@
 use super::auth_method::AuthMethod;
 
+pub mod access;
 pub mod auth;
 pub mod components;
 pub mod faucets;
 pub mod interface;
 pub mod metadata;
+pub mod mint_policies;
 pub mod wallets;
 
 pub use metadata::AccountBuilderSchemaCommitmentExt;
@@ -14,29 +16,35 @@ pub use metadata::AccountBuilderSchemaCommitmentExt;
 /// This macro generates a `LazyLock` static variable that lazily initializes
 /// the digest of a procedure from a library.
 ///
+/// The full procedure path is constructed by concatenating `$component_name` and `$proc_name`
+/// with `::` as separator (i.e. `"{component_name}::{proc_name}"`).
+/// /// Note: This macro references exported types from `miden_protocol`, so your crate must /// include `miden_protocol` as a dependency. /// /// # Arguments /// * `$name` - The name of the static variable to create -/// * `$proc_name` - The string name of the procedure +/// * `$component_name` - The name of the component (e.g. `BasicWallet::NAME`) +/// * `$proc_name` - The short name of the procedure (e.g. `"receive_asset"`) /// * `$library_fn` - The function that returns the library containing the procedure /// /// # Example /// ```ignore /// procedure_digest!( /// BASIC_WALLET_RECEIVE_ASSET, +/// BasicWallet::NAME, /// BasicWallet::RECEIVE_ASSET_PROC_NAME, /// basic_wallet_library /// ); /// ``` #[macro_export] macro_rules! procedure_digest { - ($name:ident, $proc_name:expr, $library_fn:expr) => { + ($name:ident, $component_name:expr, $proc_name:expr, $library_fn:expr) => { static $name: miden_protocol::utils::sync::LazyLock = miden_protocol::utils::sync::LazyLock::new(|| { - $library_fn().get_procedure_root_by_path($proc_name).unwrap_or_else(|| { - panic!("{} should contain '{}' procedure", stringify!($library_fn), $proc_name) + let full_path = alloc::format!("{}::{}", $component_name, $proc_name); + $library_fn().get_procedure_root_by_path(full_path.as_str()).unwrap_or_else(|| { + panic!("{} should contain '{}' procedure", stringify!($library_fn), full_path) }) }); }; diff --git a/crates/miden-standards/src/account/wallets/mod.rs b/crates/miden-standards/src/account/wallets/mod.rs index 7f6da4810a..c220a0b3e6 100644 --- a/crates/miden-standards/src/account/wallets/mod.rs +++ b/crates/miden-standards/src/account/wallets/mod.rs @@ -23,6 +23,7 @@ use crate::procedure_digest; // Initialize the digest of the `receive_asset` procedure of the Basic Wallet only once. 
procedure_digest!( BASIC_WALLET_RECEIVE_ASSET, + BasicWallet::NAME, BasicWallet::RECEIVE_ASSET_PROC_NAME, basic_wallet_library ); @@ -30,6 +31,7 @@ procedure_digest!( // Initialize the digest of the `move_asset_to_note` procedure of the Basic Wallet only once. procedure_digest!( BASIC_WALLET_MOVE_ASSET_TO_NOTE, + BasicWallet::NAME, BasicWallet::MOVE_ASSET_TO_NOTE_PROC_NAME, basic_wallet_library ); @@ -57,10 +59,10 @@ impl BasicWallet { // -------------------------------------------------------------------------------------------- /// The name of the component. - pub const NAME: &'static str = "miden::basic_wallet"; + pub const NAME: &'static str = "miden::standards::components::wallets::basic_wallet"; - const RECEIVE_ASSET_PROC_NAME: &str = "basic_wallet::receive_asset"; - const MOVE_ASSET_TO_NOTE_PROC_NAME: &str = "basic_wallet::move_asset_to_note"; + const RECEIVE_ASSET_PROC_NAME: &str = "receive_asset"; + const MOVE_ASSET_TO_NOTE_PROC_NAME: &str = "move_asset_to_note"; // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- @@ -74,13 +76,17 @@ impl BasicWallet { pub fn move_asset_to_note_digest() -> Word { *BASIC_WALLET_MOVE_ASSET_TO_NOTE } + + /// Returns the [`AccountComponentMetadata`] for this component. 
+ pub fn component_metadata() -> AccountComponentMetadata { + AccountComponentMetadata::new(Self::NAME, AccountType::all()) + .with_description("Basic wallet component for receiving and sending assets") + } } impl From for AccountComponent { fn from(_: BasicWallet) -> Self { - let metadata = AccountComponentMetadata::new(BasicWallet::NAME) - .with_description("Basic wallet component for receiving and sending assets") - .with_supports_all_types(); + let metadata = BasicWallet::component_metadata(); AccountComponent::new(basic_wallet_library(), vec![], metadata).expect( "basic wallet component should satisfy the requirements of a valid account component", @@ -162,8 +168,8 @@ pub fn create_basic_wallet( #[cfg(test)] mod tests { - use miden_processor::utils::{Deserializable, Serializable}; use miden_protocol::account::auth::{self, PublicKeyCommitment}; + use miden_protocol::utils::serde::{Deserializable, Serializable}; use miden_protocol::{ONE, Word}; use super::{Account, AccountStorageMode, AccountType, AuthMethod, create_basic_wallet}; @@ -172,7 +178,7 @@ mod tests { #[test] fn test_create_basic_wallet() { let pub_key = PublicKeyCommitment::from(Word::from([ONE; 4])); - let auth_scheme = auth::AuthScheme::Falcon512Rpo; + let auth_scheme = auth::AuthScheme::Falcon512Poseidon2; let wallet = create_basic_wallet( [1; 32], AuthMethod::SingleSig { approver: (pub_key, auth_scheme) }, diff --git a/crates/miden-standards/src/auth_method.rs b/crates/miden-standards/src/auth_method.rs index 3477361932..fc2d1a02de 100644 --- a/crates/miden-standards/src/auth_method.rs +++ b/crates/miden-standards/src/auth_method.rs @@ -10,11 +10,12 @@ pub enum AuthMethod { /// execution, avoiding unnecessary nonce increments for transactions that don't modify the /// account state. NoAuth, - /// A single-key authentication method which relies on either ECDSA or Falcon512Rpo signatures. + /// A single-key authentication method which relies on either ECDSA or Falcon512Poseidon2 + /// signatures. 
SingleSig { approver: (PublicKeyCommitment, AuthScheme), }, - /// A multi-signature authentication method using either ECDSA or Falcon512Rpo signatures. + /// A multi-signature authentication method using either ECDSA or Falcon512Poseidon2 signatures. /// /// Requires a threshold number of signatures from the provided public keys. Multisig { diff --git a/crates/miden-standards/src/code_builder/mod.rs b/crates/miden-standards/src/code_builder/mod.rs index dfcb4e8e45..9a1da52c00 100644 --- a/crates/miden-standards/src/code_builder/mod.rs +++ b/crates/miden-standards/src/code_builder/mod.rs @@ -111,6 +111,18 @@ impl CodeBuilder { } } + // CONFIGURATION + // -------------------------------------------------------------------------------------------- + + /// Configures the assembler to treat warning diagnostics as errors. + /// + /// When enabled, any warning emitted during compilation will be promoted to an error, + /// causing the compilation to fail. + pub fn with_warnings_as_errors(mut self, yes: bool) -> Self { + self.assembler = self.assembler.with_warnings_as_errors(yes); + self + } + // LIBRARY MANAGEMENT // -------------------------------------------------------------------------------------------- @@ -424,7 +436,7 @@ impl CodeBuilder { /// /// [account_lib]: crate::testing::mock_account_code::MockAccountCodeExt::mock_account_library /// [faucet_lib]: crate::testing::mock_account_code::MockAccountCodeExt::mock_faucet_library - /// [util_lib]: miden_protocol::testing::mock_util_lib::mock_util_library + /// [util_lib]: crate::testing::mock_util_lib::mock_util_library #[cfg(any(feature = "testing", test))] pub fn with_mock_libraries() -> Self { Self::with_mock_libraries_with_source_manager(Arc::new(DefaultSourceManager::default())) @@ -444,7 +456,7 @@ impl CodeBuilder { pub fn with_mock_libraries_with_source_manager( source_manager: Arc, ) -> Self { - use miden_protocol::testing::mock_util_lib::mock_util_library; + use 
crate::testing::mock_util_lib::mock_util_library; // Start with the builder linking against the transaction kernel, protocol library and // standards library. @@ -695,6 +707,12 @@ mod tests { Ok(()) } + #[test] + fn test_code_builder_warnings_as_errors() { + let assembler: Assembler = CodeBuilder::default().with_warnings_as_errors(true).into(); + assert!(assembler.warnings_as_errors()); + } + #[test] fn test_code_builder_with_advice_map_entry() -> anyhow::Result<()> { let key = Word::from([1u32, 2, 3, 4]); diff --git a/crates/miden-standards/src/errors/mod.rs b/crates/miden-standards/src/errors/mod.rs index 2bf69e28e0..f1c21dd45b 100644 --- a/crates/miden-standards/src/errors/mod.rs +++ b/crates/miden-standards/src/errors/mod.rs @@ -1,7 +1,8 @@ /// The errors from the MASM code of the Miden standards. #[cfg(any(feature = "testing", test))] -#[rustfmt::skip] -pub mod standards; +pub mod standards { + include!(concat!(env!("OUT_DIR"), "/standards_errors.rs")); +} mod code_builder_errors; pub use code_builder_errors::CodeBuilderError; diff --git a/crates/miden-standards/src/errors/standards.rs b/crates/miden-standards/src/errors/standards.rs deleted file mode 100644 index 98877b101f..0000000000 --- a/crates/miden-standards/src/errors/standards.rs +++ /dev/null @@ -1,76 +0,0 @@ -use miden_protocol::errors::MasmError; - -// This file is generated by build.rs, do not modify manually. -// It is generated by extracting errors from the MASM files in the `./asm` directory. -// -// To add a new error, define a constant in MASM of the pattern `const ERR__...`. -// Try to fit the error into a pre-existing category if possible (e.g. Account, Note, ...). 
- -// STANDARDS ERRORS -// ================================================================================================ - -/// Error Message: "initial and new number of approvers must be u32" -pub const ERR_APPROVER_COUNTS_NOT_U32: MasmError = MasmError::from_static_str("initial and new number of approvers must be u32"); - -/// Error Message: "burn requires exactly 1 note asset" -pub const ERR_BASIC_FUNGIBLE_BURN_WRONG_NUMBER_OF_ASSETS: MasmError = MasmError::from_static_str("burn requires exactly 1 note asset"); - -/// Error Message: "asset amount to burn exceeds the existing token supply" -pub const ERR_FAUCET_BURN_AMOUNT_EXCEEDS_TOKEN_SUPPLY: MasmError = MasmError::from_static_str("asset amount to burn exceeds the existing token supply"); - -/// Error Message: "token_supply plus the amount passed to distribute would exceed the maximum supply" -pub const ERR_FUNGIBLE_ASSET_DISTRIBUTE_AMOUNT_EXCEEDS_MAX_SUPPLY: MasmError = MasmError::from_static_str("token_supply plus the amount passed to distribute would exceed the maximum supply"); -/// Error Message: "max supply exceeds maximum representable fungible asset amount" -pub const ERR_FUNGIBLE_ASSET_MAX_SUPPLY_EXCEEDS_FUNGIBLE_ASSET_MAX_AMOUNT: MasmError = MasmError::from_static_str("max supply exceeds maximum representable fungible asset amount"); -/// Error Message: "token supply exceeds max supply" -pub const ERR_FUNGIBLE_ASSET_TOKEN_SUPPLY_EXCEEDS_MAX_SUPPLY: MasmError = MasmError::from_static_str("token supply exceeds max supply"); - -/// Error Message: "invalid signature scheme id: expected 2 for falcon512_rpo, 1 for ecdsa_k256_keccak" -pub const ERR_INVALID_SCHEME_ID: MasmError = MasmError::from_static_str("invalid signature scheme id: expected 2 for falcon512_rpo, 1 for ecdsa_k256_keccak"); -/// Error Message: "invalid scheme ID word format expected three zero values followed by the scheme ID" -pub const ERR_INVALID_SCHEME_ID_WORD: MasmError = MasmError::from_static_str("invalid scheme ID word format 
expected three zero values followed by the scheme ID"); - -/// Error Message: "number of approvers must be equal to or greater than threshold" -pub const ERR_MALFORMED_MULTISIG_CONFIG: MasmError = MasmError::from_static_str("number of approvers must be equal to or greater than threshold"); - -/// Error Message: "MINT script expects exactly 12 storage items for private or 16+ storage items for public output notes" -pub const ERR_MINT_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS: MasmError = MasmError::from_static_str("MINT script expects exactly 12 storage items for private or 16+ storage items for public output notes"); - -/// Error Message: "note tag length can be at most 32" -pub const ERR_NOTE_TAG_MAX_ACCOUNT_TARGET_LENGTH_EXCEEDED: MasmError = MasmError::from_static_str("note tag length can be at most 32"); - -/// Error Message: "attachment is not a valid network account target" -pub const ERR_NOT_NETWORK_ACCOUNT_TARGET: MasmError = MasmError::from_static_str("attachment is not a valid network account target"); - -/// Error Message: "failed to reclaim P2IDE note because the reclaiming account is not the sender" -pub const ERR_P2IDE_RECLAIM_ACCT_IS_NOT_SENDER: MasmError = MasmError::from_static_str("failed to reclaim P2IDE note because the reclaiming account is not the sender"); -/// Error Message: "P2IDE reclaim is disabled" -pub const ERR_P2IDE_RECLAIM_DISABLED: MasmError = MasmError::from_static_str("P2IDE reclaim is disabled"); -/// Error Message: "failed to reclaim P2IDE note because the reclaim block height is not reached yet" -pub const ERR_P2IDE_RECLAIM_HEIGHT_NOT_REACHED: MasmError = MasmError::from_static_str("failed to reclaim P2IDE note because the reclaim block height is not reached yet"); -/// Error Message: "failed to consume P2IDE note because the note is still timelocked" -pub const ERR_P2IDE_TIMELOCK_HEIGHT_NOT_REACHED: MasmError = MasmError::from_static_str("failed to consume P2IDE note because the note is still timelocked"); -/// Error Message: "P2IDE 
note expects exactly 4 note storage items" -pub const ERR_P2IDE_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS: MasmError = MasmError::from_static_str("P2IDE note expects exactly 4 note storage items"); - -/// Error Message: "P2ID's target account address and transaction address do not match" -pub const ERR_P2ID_TARGET_ACCT_MISMATCH: MasmError = MasmError::from_static_str("P2ID's target account address and transaction address do not match"); -/// Error Message: "P2ID note expects exactly 2 note storage items" -pub const ERR_P2ID_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS: MasmError = MasmError::from_static_str("P2ID note expects exactly 2 note storage items"); - -/// Error Message: "note sender is not the owner" -pub const ERR_SENDER_NOT_OWNER: MasmError = MasmError::from_static_str("note sender is not the owner"); - -/// Error Message: "signer index must be u32" -pub const ERR_SIGNER_INDEX_NOT_U32: MasmError = MasmError::from_static_str("signer index must be u32"); - -/// Error Message: "SWAP script expects exactly 16 note storage items" -pub const ERR_SWAP_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS: MasmError = MasmError::from_static_str("SWAP script expects exactly 16 note storage items"); -/// Error Message: "SWAP script requires exactly 1 note asset" -pub const ERR_SWAP_WRONG_NUMBER_OF_ASSETS: MasmError = MasmError::from_static_str("SWAP script requires exactly 1 note asset"); - -/// Error Message: "failed to approve multisig transaction as it was already executed" -pub const ERR_TX_ALREADY_EXECUTED: MasmError = MasmError::from_static_str("failed to approve multisig transaction as it was already executed"); - -/// Error Message: "number of approvers or threshold must not be zero" -pub const ERR_ZERO_IN_MULTISIG_CONFIG: MasmError = MasmError::from_static_str("number of approvers or threshold must not be zero"); diff --git a/crates/miden-standards/src/lib.rs b/crates/miden-standards/src/lib.rs index f3365c64bf..0a811b2ccf 100644 --- a/crates/miden-standards/src/lib.rs +++ 
b/crates/miden-standards/src/lib.rs @@ -14,6 +14,7 @@ pub mod code_builder; pub mod errors; pub mod note; mod standards_lib; +pub mod utils; pub use standards_lib::StandardsLib; diff --git a/crates/miden-standards/src/note/mod.rs b/crates/miden-standards/src/note/mod.rs index b945ccdad1..7da32ea234 100644 --- a/crates/miden-standards/src/note/mod.rs +++ b/crates/miden-standards/src/note/mod.rs @@ -27,11 +27,14 @@ mod p2ide; pub use p2ide::{P2ideNote, P2ideNoteStorage}; mod swap; -pub use swap::SwapNote; +pub use swap::{SwapNote, SwapNoteStorage}; mod network_account_target; pub use network_account_target::{NetworkAccountTarget, NetworkAccountTargetError}; +mod network_note; +pub use network_note::{AccountTargetNetworkNote, NetworkNoteExt}; + mod standard_note_attachment; use miden_protocol::errors::NoteError; pub use standard_note_attachment::StandardNoteAttachment; @@ -151,7 +154,7 @@ impl StandardNote { // note-based authentication (checking if the note sender equals the faucet owner) // to authorize minting, while basic faucets have different mint procedures that // are not compatible with MINT notes. 
- interface_proc_digests.contains(&NetworkFungibleFaucet::distribute_digest()) + interface_proc_digests.contains(&NetworkFungibleFaucet::mint_and_send_digest()) }, Self::BURN => { // BURN notes work with both basic and network fungible faucets because both diff --git a/crates/miden-standards/src/note/network_account_target.rs b/crates/miden-standards/src/note/network_account_target.rs index 25980428a5..4471c145f8 100644 --- a/crates/miden-standards/src/note/network_account_target.rs +++ b/crates/miden-standards/src/note/network_account_target.rs @@ -6,6 +6,7 @@ use miden_protocol::note::{ NoteAttachmentContent, NoteAttachmentKind, NoteAttachmentScheme, + NoteType, }; use crate::note::{NoteExecutionHint, StandardNoteAttachment}; @@ -100,10 +101,10 @@ impl TryFrom<&NoteAttachment> for NetworkAccountTarget { let id_prefix = word[1]; let exec_hint = word[2]; - let target_id = AccountId::try_from([id_prefix, id_suffix]) + let target_id = AccountId::try_from_elements(id_suffix, id_prefix) .map_err(NetworkAccountTargetError::DecodeTargetId)?; - let exec_hint = NoteExecutionHint::try_from(exec_hint.as_int()) + let exec_hint = NoteExecutionHint::try_from(exec_hint.as_canonical_u64()) .map_err(NetworkAccountTargetError::DecodeExecutionHint)?; NetworkAccountTarget::new(target_id, exec_hint) @@ -136,6 +137,8 @@ pub enum NetworkAccountTargetError { DecodeTargetId(#[source] AccountIdError), #[error("failed to decode execution hint")] DecodeExecutionHint(#[source] NoteError), + #[error("network note must be public, but was {0:?}")] + NoteNotPublic(NoteType), } // TESTS diff --git a/crates/miden-standards/src/note/network_note.rs b/crates/miden-standards/src/note/network_note.rs new file mode 100644 index 0000000000..c0a1c51559 --- /dev/null +++ b/crates/miden-standards/src/note/network_note.rs @@ -0,0 +1,108 @@ +use miden_protocol::account::AccountId; +use miden_protocol::note::{Note, NoteAttachment, NoteMetadata, NoteType}; + +use crate::note::{NetworkAccountTarget, 
NetworkAccountTargetError, NoteExecutionHint};
+
+/// A wrapper around a [`Note`] that is guaranteed to target a network account via a
+/// [`NetworkAccountTarget`] attachment.
+///
+/// This represents a note that is specifically targeted at a single network account. In the future,
+/// other types of network notes may exist (e.g., SWAP notes that can be consumed by network
+/// accounts but are not targeted at a specific one).
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct AccountTargetNetworkNote {
+    note: Note,
+}
+
+impl AccountTargetNetworkNote {
+    /// Attempts to construct an [`AccountTargetNetworkNote`] from `note`.
+    ///
+    /// Returns an error if:
+    /// - the note is not [`NoteType::Public`].
+    /// - the note's attachment cannot be decoded as a [`NetworkAccountTarget`].
+    pub fn new(note: Note) -> Result<Self, NetworkAccountTargetError> {
+        // Network notes must be public.
+        if note.metadata().note_type() != NoteType::Public {
+            return Err(NetworkAccountTargetError::NoteNotPublic(note.metadata().note_type()));
+        }
+
+        // Validate that the attachment is a valid NetworkAccountTarget.
+        NetworkAccountTarget::try_from(note.metadata().attachment())?;
+        Ok(Self { note })
+    }
+
+    /// Consumes `self` and returns the underlying [`Note`].
+    pub fn into_note(self) -> Note {
+        self.note
+    }
+
+    /// Returns a reference to the underlying [`Note`].
+    pub fn as_note(&self) -> &Note {
+        &self.note
+    }
+
+    /// Returns the [`NoteMetadata`] of the underlying note.
+    pub fn metadata(&self) -> &NoteMetadata {
+        self.note.metadata()
+    }
+
+    /// Returns the target network [`AccountId`].
+    pub fn target_account_id(&self) -> AccountId {
+        self.target().target_id()
+    }
+
+    /// Returns the decoded [`NetworkAccountTarget`] attachment.
+    pub fn target(&self) -> NetworkAccountTarget {
+        NetworkAccountTarget::try_from(self.note.metadata().attachment())
+            .expect("AccountTargetNetworkNote guarantees valid NetworkAccountTarget attachment")
+    }
+
+    /// Returns the [`NoteExecutionHint`] from the decoded [`NetworkAccountTarget`] attachment.
+    pub fn execution_hint(&self) -> NoteExecutionHint {
+        self.target().execution_hint()
+    }
+
+    /// Returns the raw [`NoteAttachment`] from the note metadata.
+    pub fn attachment(&self) -> &NoteAttachment {
+        self.metadata().attachment()
+    }
+
+    /// Returns the [`NoteType`] of the underlying note.
+    pub fn note_type(&self) -> NoteType {
+        self.metadata().note_type()
+    }
+}
+
+/// Convenience helpers for [`Note`]s that may target a network account.
+pub trait NetworkNoteExt {
+    /// Returns `true` if this note is public and its attachment decodes as a
+    /// [`NetworkAccountTarget`].
+    fn is_network_note(&self) -> bool;
+
+    /// Consumes `self` and returns an [`AccountTargetNetworkNote`], or an error if the attachment
+    /// is not a valid target.
+    fn into_account_target_network_note(
+        self,
+    ) -> Result<AccountTargetNetworkNote, NetworkAccountTargetError>;
+}
+
+impl NetworkNoteExt for Note {
+    fn is_network_note(&self) -> bool {
+        self.metadata().note_type() == NoteType::Public
+            && NetworkAccountTarget::try_from(self.metadata().attachment()).is_ok()
+    }
+
+    fn into_account_target_network_note(
+        self,
+    ) -> Result<AccountTargetNetworkNote, NetworkAccountTargetError> {
+        AccountTargetNetworkNote::new(self)
+    }
+}
+
+impl TryFrom<Note> for AccountTargetNetworkNote {
+    type Error = NetworkAccountTargetError;
+
+    fn try_from(note: Note) -> Result<Self, Self::Error> {
+        Self::new(note)
+    }
+}
diff --git a/crates/miden-standards/src/note/p2id.rs b/crates/miden-standards/src/note/p2id.rs
index 6749a09f96..82ea64f41a 100644
--- a/crates/miden-standards/src/note/p2id.rs
+++ b/crates/miden-standards/src/note/p2id.rs
@@ -150,8 +150,8 @@ impl TryFrom<&[Felt]> for P2idNoteStorage {
             });
         }
 
-        let target = AccountId::try_from([note_storage[1], note_storage[0]])
-            .map_err(|e| NoteError::other_with_source("failed to create account id", e))?;
+        let target = AccountId::try_from_elements(note_storage[0], note_storage[1])
+            .map_err(|err| NoteError::other_with_source("failed to create account id", err))?;
 
         Ok(Self { target })
     }
@@ -162,9 +162,9 @@
 
 #[cfg(test)]
 mod tests {
+    use miden_protocol::Felt;
     use miden_protocol::account::{AccountId, AccountIdVersion, AccountStorageMode, AccountType};
     use miden_protocol::errors::NoteError;
-    use miden_protocol::{Felt, FieldElement};
 
     use super::*;
diff --git a/crates/miden-standards/src/note/p2ide.rs b/crates/miden-standards/src/note/p2ide.rs
index 5933f78474..aa1bdafe15 100644
--- a/crates/miden-standards/src/note/p2ide.rs
+++ b/crates/miden-standards/src/note/p2ide.rs
@@ -18,7 +18,7 @@ use miden_protocol::note::{
     NoteType,
 };
 use miden_protocol::utils::sync::LazyLock;
-use miden_protocol::{Felt, FieldElement, Word};
+use miden_protocol::{Felt, Word};
 
 use crate::StandardsLib;
 
 // NOTE SCRIPT
@@ -179,14 +179,14 @@ impl TryFrom<&[Felt]> for P2ideNoteStorage {
         }
 
-        let
target = AccountId::try_from([note_storage[1], note_storage[0]]) - .map_err(|e| NoteError::other_with_source("failed to create account id", e))?; + let target = AccountId::try_from_elements(note_storage[0], note_storage[1]) + .map_err(|err| NoteError::other_with_source("failed to create account id", err))?; let reclaim_height = if note_storage[2] == Felt::ZERO { None } else { let height: u32 = note_storage[2] - .as_int() + .as_canonical_u64() .try_into() .map_err(|e| NoteError::other_with_source("invalid note storage", e))?; @@ -197,7 +197,7 @@ impl TryFrom<&[Felt]> for P2ideNoteStorage { None } else { let height: u32 = note_storage[3] - .as_int() + .as_canonical_u64() .try_into() .map_err(|e| NoteError::other_with_source("invalid note storage", e))?; @@ -213,10 +213,10 @@ impl TryFrom<&[Felt]> for P2ideNoteStorage { #[cfg(test)] mod tests { + use miden_protocol::Felt; use miden_protocol::account::{AccountId, AccountIdVersion, AccountStorageMode, AccountType}; use miden_protocol::block::BlockNumber; use miden_protocol::errors::NoteError; - use miden_protocol::{Felt, FieldElement}; use super::*; diff --git a/crates/miden-standards/src/note/swap.rs b/crates/miden-standards/src/note/swap.rs index 33e0df497d..ae91cf445f 100644 --- a/crates/miden-standards/src/note/swap.rs +++ b/crates/miden-standards/src/note/swap.rs @@ -48,7 +48,7 @@ impl SwapNote { // -------------------------------------------------------------------------------------------- /// Expected number of storage items of the SWAP note. 
- pub const NUM_STORAGE_ITEMS: usize = 16; + pub const NUM_STORAGE_ITEMS: usize = SwapNoteStorage::NUM_ITEMS; // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- @@ -89,43 +89,31 @@ impl SwapNote { return Err(NoteError::other("requested asset same as offered asset")); } - let note_script = Self::script(); - let payback_serial_num = rng.draw_word(); - let payback_recipient = P2idNoteStorage::new(sender).into_recipient(payback_serial_num); - let requested_asset_word: Word = requested_asset.into(); - let payback_tag = NoteTag::with_account_target(sender); - - let attachment_scheme = Felt::from(payback_note_attachment.attachment_scheme().as_u32()); - let attachment_kind = Felt::from(payback_note_attachment.attachment_kind().as_u8()); - let attachment = payback_note_attachment.content().to_word(); + let swap_storage = SwapNoteStorage::new( + sender, + requested_asset, + payback_note_type, + payback_note_attachment, + payback_serial_num, + ); - let mut inputs = Vec::with_capacity(16); - inputs.extend_from_slice(&[ - payback_note_type.into(), - payback_tag.into(), - attachment_scheme, - attachment_kind, - ]); - inputs.extend_from_slice(attachment.as_elements()); - inputs.extend_from_slice(requested_asset_word.as_elements()); - inputs.extend_from_slice(payback_recipient.digest().as_elements()); - let inputs = NoteStorage::new(inputs)?; + let serial_num = rng.draw_word(); + let recipient = swap_storage.into_recipient(serial_num); // build the tag for the SWAP use case let tag = Self::build_tag(swap_note_type, &offered_asset, &requested_asset); - let serial_num = rng.draw_word(); // build the outgoing note let metadata = NoteMetadata::new(sender, swap_note_type) .with_tag(tag) .with_attachment(swap_note_attachment); let assets = NoteAssets::new(vec![offered_asset])?; - let recipient = NoteRecipient::new(serial_num, note_script, inputs); let note = Note::new(assets, metadata, recipient); // build the payback note 
details + let payback_recipient = P2idNoteStorage::new(sender).into_recipient(payback_serial_num); let payback_assets = NoteAssets::new(vec![requested_asset])?; let payback_note = NoteDetails::new(payback_assets, payback_recipient); @@ -156,10 +144,10 @@ impl SwapNote { swap_use_case_id |= (swap_root_bytes[1] >> 2) as u16; // Get bits 0..8 from the faucet IDs of both assets which will form the tag payload. - let offered_asset_id: u64 = offered_asset.faucet_id_prefix().into(); + let offered_asset_id: u64 = offered_asset.faucet_id().prefix().into(); let offered_asset_tag = (offered_asset_id >> 56) as u8; - let requested_asset_id: u64 = requested_asset.faucet_id_prefix().into(); + let requested_asset_id: u64 = requested_asset.faucet_id().prefix().into(); let requested_asset_tag = (requested_asset_id >> 56) as u8; let asset_pair = ((offered_asset_tag as u16) << 8) | (requested_asset_tag as u16); @@ -172,17 +160,215 @@ impl SwapNote { } } +// SWAP NOTE STORAGE +// ================================================================================================ + +/// Canonical storage representation for a SWAP note. +/// +/// Contains the payback note configuration and the requested asset that the +/// swap creator wants to receive in exchange for the offered asset contained +/// in the note's vault. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct SwapNoteStorage { + payback_note_type: NoteType, + payback_tag: NoteTag, + payback_attachment: NoteAttachment, + requested_asset: Asset, + payback_recipient_digest: Word, +} + +impl SwapNoteStorage { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// Expected number of storage items of the SWAP note. + pub const NUM_ITEMS: usize = 20; + + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates new SWAP note storage with the specified parameters. 
+ pub fn new( + sender: AccountId, + requested_asset: Asset, + payback_note_type: NoteType, + payback_attachment: NoteAttachment, + payback_serial_number: Word, + ) -> Self { + let payback_recipient = P2idNoteStorage::new(sender).into_recipient(payback_serial_number); + let payback_tag = NoteTag::with_account_target(sender); + + Self::from_parts( + payback_note_type, + payback_tag, + payback_attachment, + requested_asset, + payback_recipient.digest(), + ) + } + + /// Creates a [`SwapNoteStorage`] from raw parts. + pub fn from_parts( + payback_note_type: NoteType, + payback_tag: NoteTag, + payback_attachment: NoteAttachment, + requested_asset: Asset, + payback_recipient_digest: Word, + ) -> Self { + Self { + payback_note_type, + payback_tag, + payback_attachment, + requested_asset, + payback_recipient_digest, + } + } + + /// Returns the payback note type. + pub fn payback_note_type(&self) -> NoteType { + self.payback_note_type + } + + /// Returns the payback note tag. + pub fn payback_tag(&self) -> NoteTag { + self.payback_tag + } + + /// Returns the payback note attachment. + pub fn payback_attachment(&self) -> &NoteAttachment { + &self.payback_attachment + } + + /// Returns the requested asset. + pub fn requested_asset(&self) -> Asset { + self.requested_asset + } + + /// Returns the payback recipient digest. + pub fn payback_recipient_digest(&self) -> Word { + self.payback_recipient_digest + } + + /// Consumes the storage and returns a SWAP [`NoteRecipient`] with the provided serial number. + /// + /// Notes created with this recipient will be SWAP notes whose storage encodes the payback + /// configuration and the requested asset stored in this [`SwapNoteStorage`]. 
+ pub fn into_recipient(self, serial_num: Word) -> NoteRecipient { + NoteRecipient::new(serial_num, SwapNote::script(), NoteStorage::from(self)) + } +} + +impl From for NoteStorage { + fn from(storage: SwapNoteStorage) -> Self { + let attachment_scheme = Felt::from(storage.payback_attachment.attachment_scheme().as_u32()); + let attachment_kind = Felt::from(storage.payback_attachment.attachment_kind().as_u8()); + let attachment = storage.payback_attachment.content().to_word(); + + let mut storage_values = Vec::with_capacity(SwapNoteStorage::NUM_ITEMS); + storage_values.extend_from_slice(&[ + storage.payback_note_type.into(), + storage.payback_tag.into(), + attachment_scheme, + attachment_kind, + ]); + storage_values.extend_from_slice(attachment.as_elements()); + storage_values.extend_from_slice(&storage.requested_asset.as_elements()); + storage_values.extend_from_slice(storage.payback_recipient_digest.as_elements()); + + NoteStorage::new(storage_values) + .expect("number of storage items should not exceed max storage items") + } +} + +// NOTE: TryFrom<&[Felt]> for SwapNoteStorage is not implemented because +// array attachment content cannot be reconstructed from storage alone. 
See https://github.com/0xMiden/protocol/issues/2555 + // TESTS // ================================================================================================ #[cfg(test)] mod tests { - use miden_protocol::account::{AccountId, AccountIdVersion, AccountStorageMode, AccountType}; + use miden_protocol::Felt; + use miden_protocol::account::{AccountIdVersion, AccountStorageMode, AccountType}; use miden_protocol::asset::{FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}; - use miden_protocol::{self}; + use miden_protocol::note::{NoteAttachment, NoteStorage, NoteTag, NoteType}; + use miden_protocol::testing::account_id::{ + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, + ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, + }; use super::*; + fn fungible_faucet() -> AccountId { + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap() + } + + fn non_fungible_faucet() -> AccountId { + ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET.try_into().unwrap() + } + + fn fungible_asset() -> Asset { + Asset::Fungible(FungibleAsset::new(fungible_faucet(), 1000).unwrap()) + } + + fn non_fungible_asset() -> Asset { + let details = + NonFungibleAssetDetails::new(non_fungible_faucet(), vec![0xaa, 0xbb]).unwrap(); + Asset::NonFungible(NonFungibleAsset::new(&details).unwrap()) + } + + #[test] + fn swap_note_storage() { + let payback_note_type = NoteType::Private; + let payback_tag = NoteTag::new(0x12345678); + let payback_attachment = NoteAttachment::default(); + let requested_asset = fungible_asset(); + let payback_recipient_digest = + Word::new([Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)]); + + let storage = SwapNoteStorage::from_parts( + payback_note_type, + payback_tag, + payback_attachment.clone(), + requested_asset, + payback_recipient_digest, + ); + + assert_eq!(storage.payback_note_type(), payback_note_type); + assert_eq!(storage.payback_tag(), payback_tag); + assert_eq!(storage.payback_attachment(), &payback_attachment); + assert_eq!(storage.requested_asset(), requested_asset); + 
assert_eq!(storage.payback_recipient_digest(), payback_recipient_digest); + + // Convert to NoteStorage + let note_storage = NoteStorage::from(storage); + assert_eq!(note_storage.num_items() as usize, SwapNoteStorage::NUM_ITEMS); + } + + #[test] + fn swap_note_storage_with_non_fungible_asset() { + let payback_note_type = NoteType::Public; + let payback_tag = NoteTag::new(0xaabbccdd); + let payback_attachment = NoteAttachment::default(); + let requested_asset = non_fungible_asset(); + let payback_recipient_digest = + Word::new([Felt::new(10), Felt::new(20), Felt::new(30), Felt::new(40)]); + + let storage = SwapNoteStorage::from_parts( + payback_note_type, + payback_tag, + payback_attachment, + requested_asset, + payback_recipient_digest, + ); + + assert_eq!(storage.payback_note_type(), payback_note_type); + assert_eq!(storage.requested_asset(), requested_asset); + + let note_storage = NoteStorage::from(storage); + assert_eq!(note_storage.num_items() as usize, SwapNoteStorage::NUM_ITEMS); + } + #[test] fn swap_tag() { // Construct an ID that starts with 0xcdb1. 
@@ -216,8 +402,7 @@ mod tests { AccountIdVersion::Version0, AccountType::NonFungibleFaucet, AccountStorageMode::Public, - ) - .prefix(), + ), vec![0xaa, 0xbb, 0xcc, 0xdd], ) .unwrap(), diff --git a/crates/miden-standards/src/standards_lib.rs b/crates/miden-standards/src/standards_lib.rs index b8429b6acb..effda29a16 100644 --- a/crates/miden-standards/src/standards_lib.rs +++ b/crates/miden-standards/src/standards_lib.rs @@ -59,7 +59,7 @@ mod tests { #[test] fn test_compile() { - let path = Path::new("::miden::standards::faucets::basic_fungible::distribute"); + let path = Path::new("::miden::standards::faucets::basic_fungible::mint_and_send"); let miden = StandardsLib::default(); let exists = miden.0.module_infos().any(|module| { module diff --git a/crates/miden-standards/src/testing/account_component/conditional_auth.rs b/crates/miden-standards/src/testing/account_component/conditional_auth.rs index 47b7fb6415..64a5f4ce46 100644 --- a/crates/miden-standards/src/testing/account_component/conditional_auth.rs +++ b/crates/miden-standards/src/testing/account_component/conditional_auth.rs @@ -1,7 +1,7 @@ use alloc::string::String; use miden_protocol::account::component::AccountComponentMetadata; -use miden_protocol::account::{AccountComponent, AccountComponentCode}; +use miden_protocol::account::{AccountComponent, AccountComponentCode, AccountType}; use miden_protocol::utils::sync::LazyLock; use crate::code_builder::CodeBuilder; @@ -15,6 +15,7 @@ static CONDITIONAL_AUTH_CODE: LazyLock = LazyLock::new(|| { const WRONG_ARGS="{ERR_WRONG_ARGS_MSG}" + @auth_script pub proc auth_conditional # => [AUTH_ARGS] @@ -51,9 +52,9 @@ pub struct ConditionalAuthComponent; impl From for AccountComponent { fn from(_: ConditionalAuthComponent) -> Self { - let metadata = AccountComponentMetadata::new("miden::testing::conditional_auth") - .with_description("Testing auth component with conditional behavior") - .with_supports_all_types(); + let metadata = + 
AccountComponentMetadata::new("miden::testing::conditional_auth", AccountType::all()) + .with_description("Testing auth component with conditional behavior"); AccountComponent::new(CONDITIONAL_AUTH_LIBRARY.clone(), vec![], metadata) .expect("component should be valid") diff --git a/crates/miden-standards/src/testing/account_component/incr_nonce.rs b/crates/miden-standards/src/testing/account_component/incr_nonce.rs index ff1f88f91d..95c4158c64 100644 --- a/crates/miden-standards/src/testing/account_component/incr_nonce.rs +++ b/crates/miden-standards/src/testing/account_component/incr_nonce.rs @@ -1,5 +1,5 @@ -use miden_protocol::account::AccountComponent; use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{AccountComponent, AccountType}; use miden_protocol::assembly::Library; use miden_protocol::utils::sync::LazyLock; @@ -8,6 +8,7 @@ use crate::code_builder::CodeBuilder; const INCR_NONCE_AUTH_CODE: &str = " use miden::protocol::native_account + @auth_script pub proc auth_incr_nonce exec.native_account::incr_nonce drop end @@ -17,7 +18,7 @@ static INCR_NONCE_AUTH_LIBRARY: LazyLock = LazyLock::new(|| { CodeBuilder::default() .compile_component_code("incr_nonce", INCR_NONCE_AUTH_CODE) .expect("incr nonce code should be valid") - .into_library() + .into() }); /// Creates a mock authentication [`AccountComponent`] for testing purposes under the "incr_nonce" @@ -28,9 +29,9 @@ pub struct IncrNonceAuthComponent; impl From for AccountComponent { fn from(_: IncrNonceAuthComponent) -> Self { - let metadata = AccountComponentMetadata::new("miden::testing::incr_nonce_auth") - .with_description("Testing auth component that always increments nonce") - .with_supports_all_types(); + let metadata = + AccountComponentMetadata::new("miden::testing::incr_nonce_auth", AccountType::all()) + .with_description("Testing auth component that always increments nonce"); AccountComponent::new(INCR_NONCE_AUTH_LIBRARY.clone(), vec![], metadata) 
.expect("component should be valid") diff --git a/crates/miden-standards/src/testing/account_component/mock_account_component.rs b/crates/miden-standards/src/testing/account_component/mock_account_component.rs index 72e024a48a..e3e089e2cb 100644 --- a/crates/miden-standards/src/testing/account_component/mock_account_component.rs +++ b/crates/miden-standards/src/testing/account_component/mock_account_component.rs @@ -1,7 +1,13 @@ use alloc::vec::Vec; use miden_protocol::account::component::AccountComponentMetadata; -use miden_protocol::account::{AccountCode, AccountComponent, AccountStorage, StorageSlot}; +use miden_protocol::account::{ + AccountCode, + AccountComponent, + AccountStorage, + AccountType, + StorageSlot, +}; use crate::testing::mock_account_code::MockAccountCodeExt; @@ -55,9 +61,9 @@ impl MockAccountComponent { impl From for AccountComponent { fn from(mock_component: MockAccountComponent) -> Self { - let metadata = AccountComponentMetadata::new("miden::testing::mock_account") - .with_description("Mock account component for testing") - .with_supports_all_types(); + let metadata = + AccountComponentMetadata::new("miden::testing::mock_account", AccountType::all()) + .with_description("Mock account component for testing"); AccountComponent::new( AccountCode::mock_account_library(), diff --git a/crates/miden-standards/src/testing/account_component/mock_faucet_component.rs b/crates/miden-standards/src/testing/account_component/mock_faucet_component.rs index 3e82734817..23cffa2ec3 100644 --- a/crates/miden-standards/src/testing/account_component/mock_faucet_component.rs +++ b/crates/miden-standards/src/testing/account_component/mock_faucet_component.rs @@ -19,10 +19,11 @@ pub struct MockFaucetComponent; impl From for AccountComponent { fn from(_: MockFaucetComponent) -> Self { - let metadata = AccountComponentMetadata::new("miden::testing::mock_faucet") - .with_description("Mock faucet component for testing") - 
.with_supported_type(AccountType::FungibleFaucet) - .with_supported_type(AccountType::NonFungibleFaucet); + let metadata = AccountComponentMetadata::new( + "miden::testing::mock_faucet", + [AccountType::FungibleFaucet, AccountType::NonFungibleFaucet], + ) + .with_description("Mock faucet component for testing"); AccountComponent::new(AccountCode::mock_faucet_library(), vec![], metadata).expect( "mock faucet component should satisfy the requirements of a valid account component", diff --git a/crates/miden-standards/src/testing/mock_account_code.rs b/crates/miden-standards/src/testing/mock_account_code.rs index cabcb23028..48de0e4d32 100644 --- a/crates/miden-standards/src/testing/mock_account_code.rs +++ b/crates/miden-standards/src/testing/mock_account_code.rs @@ -7,18 +7,18 @@ use crate::code_builder::CodeBuilder; const MOCK_FAUCET_CODE: &str = " use miden::protocol::faucet - #! Inputs: [ASSET, pad(12)] - #! Outputs: [ASSET, pad(12)] + #! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] + #! Outputs: [NEW_ASSET_VALUE, pad(12)] pub proc mint exec.faucet::mint - # => [ASSET, pad(12)] + # => [NEW_ASSET_VALUE, pad(12)] end - #! Inputs: [ASSET, pad(12)] - #! Outputs: [ASSET, pad(12)] + #! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] + #! Outputs: [pad(16)] pub proc burn exec.faucet::burn - # => [ASSET, pad(12)] + # => [pad(16)] end "; @@ -105,18 +105,18 @@ const MOCK_ACCOUNT_CODE: &str = " # => [STORAGE_COMMITMENT, pad(12)] end - #! Inputs: [ASSET, pad(12)] - #! Outputs: [ASSET', pad(12)] + #! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] + #! Outputs: [ASSET_VALUE', pad(12)] pub proc add_asset exec.native_account::add_asset - # => [ASSET', pad(12)] + # => [ASSET_VALUE', pad(12)] end - #! Inputs: [ASSET, pad(12)] - #! Outputs: [ASSET, pad(12)] + #! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] + #! Outputs: [REMAINING_ASSET_VALUE, pad(12)] pub proc remove_asset exec.native_account::remove_asset - # => [ASSET, pad(12)] + # => [REMAINING_ASSET_VALUE, pad(12)] end #! 
Inputs: [pad(16)] @@ -142,14 +142,14 @@ static MOCK_FAUCET_LIBRARY: LazyLock = LazyLock::new(|| { CodeBuilder::default() .compile_component_code("mock::faucet", MOCK_FAUCET_CODE) .expect("mock faucet code should be valid") - .into_library() + .into() }); static MOCK_ACCOUNT_LIBRARY: LazyLock = LazyLock::new(|| { CodeBuilder::default() .compile_component_code("mock::account", MOCK_ACCOUNT_CODE) .expect("mock account code should be valid") - .into_library() + .into() }); // MOCK ACCOUNT CODE EXT diff --git a/crates/miden-standards/src/testing/mock_util_lib.rs b/crates/miden-standards/src/testing/mock_util_lib.rs new file mode 100644 index 0000000000..211feed5d9 --- /dev/null +++ b/crates/miden-standards/src/testing/mock_util_lib.rs @@ -0,0 +1,75 @@ +use miden_protocol::assembly::Library; +use miden_protocol::assembly::diagnostics::NamedSource; +use miden_protocol::transaction::TransactionKernel; +use miden_protocol::utils::sync::LazyLock; + +use crate::StandardsLib; + +const MOCK_UTIL_LIBRARY_CODE: &str = " + use miden::protocol::output_note + use miden::standards::wallets::basic->wallet + + #! Inputs: [] + #! Outputs: [note_idx] + pub proc create_default_note + push.1.2.3.4 # = RECIPIENT + push.2 # = NoteType::Private + push.0 # = NoteTag + # => [tag, note_type, RECIPIENT] + + exec.output_note::create + # => [note_idx] + end + + #! Inputs: [ASSET_KEY, ASSET_VALUE] + #! Outputs: [] + pub proc create_default_note_with_asset + exec.create_default_note + # => [note_idx, ASSET_KEY, ASSET_VALUE] + + movdn.8 + # => [ASSET_KEY, ASSET_VALUE, note_idx] + + exec.output_note::add_asset + # => [] + end + + #! Inputs: [ASSET_KEY, ASSET_VALUE] + #! Outputs: [] + pub proc create_default_note_with_moved_asset + exec.create_default_note + # => [note_idx, ASSET_KEY, ASSET_VALUE] + + movdn.8 + # => [ASSET_KEY, ASSET_VALUE, note_idx] + + exec.move_asset_to_note + # => [] + end + + #! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx] + #! 
Outputs: [] + pub proc move_asset_to_note + repeat.7 push.0 movdn.9 end + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + + call.wallet::move_asset_to_note + + dropw dropw dropw dropw + end +"; + +static MOCK_UTIL_LIBRARY: LazyLock = LazyLock::new(|| { + TransactionKernel::assembler() + .with_dynamic_library(StandardsLib::default()) + .expect("dynamically linking standards library should work") + .assemble_library([NamedSource::new("mock::util", MOCK_UTIL_LIBRARY_CODE)]) + .expect("mock util library should be valid") +}); + +/// Returns the mock test [`Library`] under the `mock::util` namespace. +/// +/// This provides convenient wrappers for testing purposes. +pub fn mock_util_library() -> Library { + MOCK_UTIL_LIBRARY.clone() +} diff --git a/crates/miden-standards/src/testing/mod.rs b/crates/miden-standards/src/testing/mod.rs index f08811b562..01cf73f63c 100644 --- a/crates/miden-standards/src/testing/mod.rs +++ b/crates/miden-standards/src/testing/mod.rs @@ -4,4 +4,5 @@ pub mod account_interface; pub mod mock_account; pub mod mock_account_code; +pub mod mock_util_lib; pub mod note; diff --git a/crates/miden-standards/src/testing/note.rs b/crates/miden-standards/src/testing/note.rs index cc077e35ef..6c8d4ef1a1 100644 --- a/crates/miden-standards/src/testing/note.rs +++ b/crates/miden-standards/src/testing/note.rs @@ -13,11 +13,13 @@ use miden_protocol::note::{ NoteAttachment, NoteMetadata, NoteRecipient, + NoteScript, NoteStorage, NoteTag, NoteType, }; use miden_protocol::testing::note::DEFAULT_NOTE_CODE; +use miden_protocol::vm::Package; use miden_protocol::{Felt, Word}; use rand::Rng; @@ -26,6 +28,15 @@ use crate::code_builder::CodeBuilder; // NOTE BUILDER // ================================================================================================ +#[derive(Debug, Clone)] +enum SourceCodeOrigin { + Masm { + dyn_libraries: Vec, + source_manager: Arc, + }, + Package(Arc), +} + #[derive(Debug, Clone)] pub struct NoteBuilder { sender: AccountId, @@ -36,8 
+47,7 @@ pub struct NoteBuilder { tag: NoteTag, code: String, attachment: NoteAttachment, - dyn_libraries: Vec, - source_manager: Arc, + source_code: SourceCodeOrigin, } impl NoteBuilder { @@ -59,8 +69,10 @@ impl NoteBuilder { tag: NoteTag::with_account_target(sender), code: DEFAULT_NOTE_CODE.to_string(), attachment: NoteAttachment::default(), - dyn_libraries: Vec::new(), - source_manager: Arc::new(DefaultSourceManager::default()), + source_code: SourceCodeOrigin::Masm { + dyn_libraries: Vec::new(), + source_manager: Arc::new(DefaultSourceManager::default()), + }, } } @@ -112,42 +124,68 @@ impl NoteBuilder { /// build-time. pub fn dynamically_linked_libraries( mut self, - dyn_libraries: impl IntoIterator, + dyn_libs: impl IntoIterator, ) -> Self { - self.dyn_libraries.extend(dyn_libraries); + match &mut self.source_code { + SourceCodeOrigin::Masm { dyn_libraries, .. } => { + dyn_libraries.extend(dyn_libs); + }, + SourceCodeOrigin::Package(_) => { + panic!("dynamic libraries cannot be set on a package") + }, + } self } - pub fn source_manager(mut self, source_manager: Arc) -> Self { - self.source_manager = source_manager; + pub fn source_manager(mut self, sm: Arc) -> Self { + match &mut self.source_code { + SourceCodeOrigin::Masm { source_manager, .. } => { + *source_manager = sm; + }, + SourceCodeOrigin::Package(_) => { + panic!("source manager cannot be set on a package") + }, + } + self + } + + /// Sets the source code origin to a package. + pub fn package(mut self, package: Package) -> Self { + self.source_code = SourceCodeOrigin::Package(Arc::new(package)); self } pub fn build(self) -> Result { - // Generate a unique file name from the note's serial number, which should be unique per - // note. Only includes two elements in the file name which should be enough for the - // uniqueness in the testing context and does not result in overly long file names which do - // not render well in all situations. 
- let virtual_source_file = self.source_manager.load( - SourceLanguage::Masm, - Uri::new(format!( - "note_{:x}{:x}", - self.serial_num[0].as_int(), - self.serial_num[1].as_int() - )), - self.code, - ); - - let mut builder = CodeBuilder::with_source_manager(self.source_manager.clone()); - for dyn_library in self.dyn_libraries { - builder - .link_dynamic_library(&dyn_library) - .expect("library should link successfully"); - } + let note_script = match self.source_code { + SourceCodeOrigin::Masm { dyn_libraries, source_manager } => { + // Generate a unique file name from the note's serial number, which should be + // unique per note. Only includes two elements in the file name which should be + // enough for the uniqueness in the testing context and does not result in overly + // long file names which do not render well in all situations. + let virtual_source_file = source_manager.load( + SourceLanguage::Masm, + Uri::new(format!( + "note_{:x}{:x}", + self.serial_num[0].as_canonical_u64(), + self.serial_num[1].as_canonical_u64() + )), + self.code, + ); + + let mut builder = CodeBuilder::with_source_manager(source_manager.clone()); + for dyn_library in dyn_libraries { + builder + .link_dynamic_library(&dyn_library) + .expect("library should link successfully"); + } + + builder + .compile_note_script(virtual_source_file) + .expect("note script should compile") + }, + SourceCodeOrigin::Package(package) => NoteScript::from_package(&package)?, + }; - let note_script = builder - .compile_note_script(virtual_source_file) - .expect("note script should compile"); let vault = NoteAssets::new(self.assets)?; let metadata = NoteMetadata::new(self.sender, self.note_type) .with_tag(self.tag) diff --git a/crates/miden-standards/src/utils/mod.rs b/crates/miden-standards/src/utils/mod.rs new file mode 100644 index 0000000000..d245b85214 --- /dev/null +++ b/crates/miden-standards/src/utils/mod.rs @@ -0,0 +1 @@ +pub mod string; diff --git a/crates/miden-standards/src/utils/string.rs 
b/crates/miden-standards/src/utils/string.rs new file mode 100644 index 0000000000..5716961846 --- /dev/null +++ b/crates/miden-standards/src/utils/string.rs @@ -0,0 +1,323 @@ +//! Fixed-width UTF-8 string stored as N Words (7 bytes/felt, length-prefixed). +//! +//! [`FixedWidthString`] is the generic building block for encoding arbitrary UTF-8 strings into +//! a fixed number of storage words. `N` must be at most 9; with N=9 the capacity is 9×4×7−1 = 251 +//! bytes, which is the maximum that fits in the u8 length prefix (leaving 251 bytes for payload). +//! The maximum storable string length is therefore **251 bytes** (when N=9). +//! +//! ## Buffer layout (N × 4 × 7 bytes) +//! +//! ```text +//! Byte 0: string length (u8) +//! Bytes 1..1+len: UTF-8 content +//! Remaining: zero-padded +//! ``` +//! +//! Each 7-byte chunk is stored as a little-endian `u64` with the high byte always zero, so the +//! value is always < 2^56 and fits safely in a Goldilocks field element. + +use alloc::boxed::Box; +use alloc::string::String; +use alloc::vec::Vec; + +use miden_protocol::{Felt, WORD_SIZE, Word}; + +// ENCODING CONSTANT +// ================================================================================================ + +/// Number of data bytes packed into each felt (7 bytes = 56 bits, always < Goldilocks prime). +const BYTES_PER_FELT: usize = 7; + +// FIXED-WIDTH STRING +// ================================================================================================ + +/// A UTF-8 string stored in exactly `N` Words (N×4 felts, 7 bytes/felt, length-prefixed). +/// +/// `N` must be at most 9. With N=9 the maximum storable string length is **251 bytes** (the +/// full buffer is 252 bytes, one of which is consumed by the length prefix). Higher-level wrapper +/// types may impose a tighter limit. 
+/// +/// Using N=10 (or larger) fails at compile time: +/// +/// ```compile_fail +/// # use miden_standards::utils::string::FixedWidthString; +/// let _ = FixedWidthString::<10>::CAPACITY; // assertion failed: N <= 9 +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct FixedWidthString(Box); + +impl Default for FixedWidthString { + fn default() -> Self { + Self("".into()) + } +} + +/// Maximum storable string length (one byte is used for the length prefix). +const MAX_PAYLOAD_BYTES: usize = 251; + +impl FixedWidthString { + /// Compile-time check: N must be at most 9 so that CAPACITY ≤ 251 and the length + /// fits in the u8 prefix. (Referenced by CAPACITY so the assert is always evaluated.) + const _CAPACITY_FITS_LENGTH_PREFIX: () = assert!(N <= 9); + + /// Maximum bytes that can be stored (full capacity of the N words minus the length byte). + /// Never exceeds 251 because the length is encoded in a single u8 (bytes 0..=251). + pub const CAPACITY: usize = + N * 4 * BYTES_PER_FELT - 1 + (Self::_CAPACITY_FITS_LENGTH_PREFIX, 0).1; + + /// Creates a [`FixedWidthString`] from a UTF-8 string, validating it fits within capacity. + pub fn new(value: &str) -> Result { + if value.len() > Self::CAPACITY { + return Err(FixedWidthStringError::TooLong { + actual: value.len(), + max: Self::CAPACITY, + }); + } + Ok(Self(value.into())) + } + + /// Returns the string content. + pub fn as_str(&self) -> &str { + &self.0 + } + + /// Encodes the string into `N` Words (7 bytes/felt, length-prefixed, zero-padded). 
+ pub fn to_words(&self) -> Vec { + let n_felts = N * WORD_SIZE; + let buf_len = n_felts * BYTES_PER_FELT; + let bytes = self.0.as_bytes(); + debug_assert!(bytes.len() < buf_len); + + let mut buf = alloc::vec![0u8; buf_len]; + buf[0] = bytes.len() as u8; + buf[1..1 + bytes.len()].copy_from_slice(bytes); + + (0..N) + .map(|word_idx| { + let felts: [Felt; 4] = core::array::from_fn(|felt_idx| { + let start = (word_idx * 4 + felt_idx) * BYTES_PER_FELT; + let mut le_bytes = [0u8; 8]; + le_bytes[..BYTES_PER_FELT].copy_from_slice(&buf[start..start + BYTES_PER_FELT]); + Felt::try_from(u64::from_le_bytes(le_bytes)) + .expect("7-byte LE value always fits in a Goldilocks felt") + }); + Word::from(felts) + }) + .collect() + } + + /// Decodes a [`FixedWidthString`] from a slice of exactly `N` Words. + pub fn try_from_words(words: &[Word]) -> Result { + if words.len() != N { + return Err(FixedWidthStringError::InvalidLength { expected: N, got: words.len() }); + } + let n_felts = N * WORD_SIZE; + let buf_len = n_felts * BYTES_PER_FELT; + let mut buf = alloc::vec![0u8; buf_len]; + + for (word_idx, word) in words.iter().enumerate() { + for (felt_idx, felt) in word.as_slice().iter().enumerate() { + let felt_value = felt.as_canonical_u64(); + let le_bytes = felt_value.to_le_bytes(); + if le_bytes[BYTES_PER_FELT] != 0 { + return Err(FixedWidthStringError::InvalidPadding); + } + let start = (word_idx * 4 + felt_idx) * BYTES_PER_FELT; + buf[start..start + BYTES_PER_FELT].copy_from_slice(&le_bytes[..BYTES_PER_FELT]); + } + } + + let len = buf[0] as usize; + if len > MAX_PAYLOAD_BYTES { + return Err(FixedWidthStringError::InvalidLengthPrefix); + } + if len + 1 > buf_len { + return Err(FixedWidthStringError::InvalidLengthPrefix); + } + String::from_utf8(buf[1..1 + len].to_vec()) + .map_err(FixedWidthStringError::InvalidUtf8) + .map(|s| Self(s.into())) + } +} + +// ERROR TYPE +// ================================================================================================ + +/// Error 
type for [`FixedWidthString`] construction and decoding. +#[derive(Debug, Clone, thiserror::Error)] +pub enum FixedWidthStringError { + /// String exceeds the maximum capacity for this word width. + #[error("string must be at most {max} bytes, got {actual}")] + TooLong { actual: usize, max: usize }, + /// Decoded bytes are not valid UTF-8. + #[error("string is not valid UTF-8")] + InvalidUtf8(#[source] alloc::string::FromUtf8Error), + /// A felt's high byte (byte index 7 in LE) is non-zero, violating the 7-bytes-per-felt + /// invariant. + #[error("felt high byte is non-zero (invalid padding)")] + InvalidPadding, + /// The length prefix byte claims more bytes than the buffer can hold, or the length is >= 252. + #[error("length prefix is invalid or exceeds buffer capacity")] + InvalidLengthPrefix, + /// Slice length does not match the expected word count. + #[error("expected {expected} words, got {got}")] + InvalidLength { expected: usize, got: usize }, +} + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn empty_string_roundtrip() { + let s: FixedWidthString<2> = FixedWidthString::new("").unwrap(); + let words = s.to_words(); + assert_eq!(words.len(), 2); + let decoded = FixedWidthString::<2>::try_from_words(&words).unwrap(); + assert_eq!(decoded.as_str(), ""); + } + + #[test] + fn ascii_roundtrip_2_words() { + let s = FixedWidthString::<2>::new("hello").unwrap(); + let decoded = FixedWidthString::<2>::try_from_words(&s.to_words()).unwrap(); + assert_eq!(decoded.as_str(), "hello"); + } + + #[test] + fn ascii_roundtrip_7_words() { + let text = "A longer description that spans many felts"; + let s = FixedWidthString::<7>::new(text).unwrap(); + let decoded = FixedWidthString::<7>::try_from_words(&s.to_words()).unwrap(); + assert_eq!(decoded.as_str(), text); + } + + #[test] + fn utf8_multibyte_roundtrip() { + // "café" — contains a 2-byte UTF-8 
sequence + let s = FixedWidthString::<2>::new("café").unwrap(); + let decoded = FixedWidthString::<2>::try_from_words(&s.to_words()).unwrap(); + assert_eq!(decoded.as_str(), "café"); + } + + #[test] + fn exactly_at_capacity_accepted() { + let cap = FixedWidthString::<2>::CAPACITY; // 2*4*7 - 1 = 55 + let s = "a".repeat(cap); + assert!(FixedWidthString::<2>::new(&s).is_ok()); + } + + #[test] + fn one_over_capacity_rejected() { + let cap = FixedWidthString::<2>::CAPACITY; + let s = "a".repeat(cap + 1); + assert!(matches!( + FixedWidthString::<2>::new(&s), + Err(FixedWidthStringError::TooLong { .. }) + )); + } + + #[test] + fn capacity_7_words() { + // 7*4*7 - 1 = 195 + assert_eq!(FixedWidthString::<7>::CAPACITY, 195); + let s = "b".repeat(195); + let fw = FixedWidthString::<7>::new(&s).unwrap(); + let decoded = FixedWidthString::<7>::try_from_words(&fw.to_words()).unwrap(); + assert_eq!(decoded.as_str(), s); + } + + #[test] + fn capacity_9_words_is_max() { + // Max N is 9: 9*4*7 - 1 = 251 (one byte for length prefix). + assert_eq!(FixedWidthString::<9>::CAPACITY, 251); + let s = "x".repeat(251); + let fw = FixedWidthString::<9>::new(&s).unwrap(); + let decoded = FixedWidthString::<9>::try_from_words(&fw.to_words()).unwrap(); + assert_eq!(decoded.as_str(), s); + } + + #[test] + #[allow(clippy::assertions_on_constants)] + fn n10_would_exceed_length_prefix() { + // N=10 would give 10*4*7 - 1 = 279 > 251, so it is disallowed. CAPACITY is defined so + // that it depends on _CAPACITY_FITS_LENGTH_PREFIX; therefore any use of + // FixedWidthString::<10> (e.g. CAPACITY) fails at compile time with "assertion failed: N <= + // 9". That compile-time failure is also tested by the `compile_fail` doctest in the + // doc comment above (on [`FixedWidthString`]). 
+ assert!(10 * 4 * BYTES_PER_FELT - 1 > MAX_PAYLOAD_BYTES); + } + + #[test] + fn to_words_returns_correct_count() { + let s = FixedWidthString::<7>::new("test").unwrap(); + assert_eq!(s.to_words().len(), 7); + } + + #[test] + fn wrong_word_count_returns_error() { + let s = FixedWidthString::<2>::new("hi").unwrap(); + let words = s.to_words(); + // pass only 1 word instead of 2 + assert!(matches!( + FixedWidthString::<2>::try_from_words(&words[..1]), + Err(FixedWidthStringError::InvalidLength { expected: 2, got: 1 }) + )); + } + + #[test] + fn length_prefix_overflow_returns_invalid_length_prefix() { + // The length byte (first byte of first felt) is set to 0xFF, which exceeds the buffer + // and triggers InvalidLengthPrefix. (This is the low byte of the felt, not the high byte.) + let overflow_len = Felt::try_from(0xff_u64).unwrap(); + let words = [ + Word::from([overflow_len, Felt::ZERO, Felt::ZERO, Felt::ZERO]), + Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::ZERO]), + ]; + assert!(matches!( + FixedWidthString::<2>::try_from_words(&words), + Err(FixedWidthStringError::InvalidLengthPrefix) + )); + } + + #[test] + fn felt_with_high_byte_set_returns_invalid_padding() { + // Construct words where one felt has its 8th byte (LE index 7) non-zero, violating the + // 7-bytes-per-felt invariant. Bit 63 set gives a valid Felt but invalid length/padding. + let high_byte_non_zero = Felt::try_from(2u64.pow(63)).unwrap(); + let words = [ + Word::from([Felt::ZERO, high_byte_non_zero, Felt::ZERO, Felt::ZERO]), + Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::ZERO]), + ]; + assert!(matches!( + FixedWidthString::<2>::try_from_words(&words), + Err(FixedWidthStringError::InvalidPadding) + )); + } + + #[test] + fn non_utf8_bytes_return_invalid_utf8() { + // Encode raw bytes that are not valid UTF-8 (e.g. 0xFF byte in content). + // Length byte = 1, content byte = 0xFF (invalid UTF-8 start byte). 
+ // Pack into first felt: LE bytes [1, 0xFF, 0, 0, 0, 0, 0] → u64 = 0x0000_0000_0000_ff01 + let raw: u64 = 0x0000_0000_0000_ff01; + let bad_felt = Felt::try_from(raw).unwrap(); + let words = [ + Word::from([bad_felt, Felt::ZERO, Felt::ZERO, Felt::ZERO]), + Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::ZERO]), + ]; + assert!(matches!( + FixedWidthString::<2>::try_from_words(&words), + Err(FixedWidthStringError::InvalidUtf8(_)) + )); + } + + #[test] + fn default_is_empty_string() { + let s: FixedWidthString<2> = FixedWidthString::default(); + assert_eq!(s.as_str(), ""); + } +} diff --git a/crates/miden-testing/Cargo.toml b/crates/miden-testing/Cargo.toml index 3ed26233cc..28b86046ab 100644 --- a/crates/miden-testing/Cargo.toml +++ b/crates/miden-testing/Cargo.toml @@ -41,17 +41,15 @@ itertools = { default-features = false, features = ["use_alloc"], version = "0 rand = { features = ["os_rng", "small_rng"], workspace = true } rand_chacha = { workspace = true } thiserror = { workspace = true } -winterfell = { version = "0.13" } [dev-dependencies] -anyhow = { features = ["backtrace", "std"], workspace = true } -assert_matches = { workspace = true } -hex = { version = "0.4" } -miden-crypto = { workspace = true } -miden-protocol = { features = ["std"], workspace = true } -primitive-types = { workspace = true } -rstest = { workspace = true } -serde = { features = ["derive"], workspace = true } -serde_json = { version = "1.0" } -tokio = { features = ["macros", "rt"], workspace = true } -winter-rand-utils = { version = "0.13" } +anyhow = { features = ["backtrace", "std"], workspace = true } +assert_matches = { workspace = true } +hex = { version = "0.4" } +miden-crypto = { workspace = true } +miden-protocol = { features = ["std"], workspace = true } +primitive-types = { workspace = true } +rstest = { workspace = true } +serde = { features = ["derive"], workspace = true } +serde_json = { features = ["arbitrary_precision"], version = "1.0" } +tokio = { features = 
["macros", "rt"], workspace = true } diff --git a/crates/miden-testing/src/executor.rs b/crates/miden-testing/src/executor.rs index c7a417250b..6e0486d502 100644 --- a/crates/miden-testing/src/executor.rs +++ b/crates/miden-testing/src/executor.rs @@ -1,7 +1,7 @@ #[cfg(test)] use miden_processor::DefaultHost; -use miden_processor::fast::{ExecutionOutput, FastProcessor}; -use miden_processor::{AdviceInputs, AsyncHost, Program, StackInputs}; +use miden_processor::advice::AdviceInputs; +use miden_processor::{ExecutionOutput, FastProcessor, Host, Program, StackInputs}; #[cfg(test)] use miden_protocol::assembly::Assembler; @@ -17,7 +17,7 @@ pub(crate) struct CodeExecutor { advice_inputs: AdviceInputs, } -impl CodeExecutor { +impl CodeExecutor { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- pub(crate) fn new(host: H) -> Self { @@ -64,16 +64,11 @@ impl CodeExecutor { /// To improve the error message quality, convert the returned [`ExecutionError`] into a /// [`Report`](miden_protocol::assembly::diagnostics::Report). pub async fn execute_program(mut self, program: Program) -> Result { - // This reverses the stack inputs (even though it doesn't look like it does) because the - // fast processor expects the reverse order. - // - // Once we use the FastProcessor for execution and proving, we can change the way these - // inputs are constructed in TransactionKernel::prepare_inputs. 
- let stack_inputs = - StackInputs::new(self.stack_inputs.unwrap_or_default().iter().copied().collect()) - .unwrap(); - - let processor = FastProcessor::new_debug(stack_inputs.as_slice(), self.advice_inputs); + let stack_inputs = self.stack_inputs.unwrap_or_default(); + + let processor = FastProcessor::new(stack_inputs) + .with_advice(self.advice_inputs) + .with_debugging(true); let execution_output = processor.execute(&program, &mut self.host).await.map_err(ExecError::new)?; diff --git a/crates/miden-testing/src/kernel_tests/batch/proposed_batch.rs b/crates/miden-testing/src/kernel_tests/batch/proposed_batch.rs index 6a29dc72a4..54cef785ee 100644 --- a/crates/miden-testing/src/kernel_tests/batch/proposed_batch.rs +++ b/crates/miden-testing/src/kernel_tests/batch/proposed_batch.rs @@ -11,7 +11,13 @@ use miden_protocol::crypto::merkle::MerkleError; use miden_protocol::errors::{BatchAccountUpdateError, ProposedBatchError}; use miden_protocol::note::{Note, NoteType}; use miden_protocol::testing::account_id::AccountIdBuilder; -use miden_protocol::transaction::{InputNote, InputNoteCommitment, OutputNote, PartialBlockchain}; +use miden_protocol::transaction::{ + InputNote, + InputNoteCommitment, + OutputNote, + PartialBlockchain, + RawOutputNote, +}; use miden_standards::testing::account_component::MockAccountComponent; use miden_standards::testing::note::NoteBuilder; use rand::rngs::SmallRng; @@ -31,7 +37,7 @@ pub fn mock_note(num: u8) -> Note { } pub fn mock_output_note(num: u8) -> OutputNote { - OutputNote::Full(mock_note(num)) + RawOutputNote::Full(mock_note(num)).to_output_note().unwrap() } struct TestSetup { @@ -91,7 +97,7 @@ fn note_created_and_consumed_in_same_batch() -> anyhow::Result<()> { let tx1 = MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) - .output_notes(vec![OutputNote::Full(note.clone())]) + 
.output_notes(vec![RawOutputNote::Full(note.clone()).to_output_note().unwrap()]) .build()?; let tx2 = MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) @@ -281,8 +287,8 @@ async fn unauthenticated_note_converted_to_authenticated() -> anyhow::Result<()> let tx = chain .build_tx_context(account1.clone(), &[spawn_note.id()], &[])? .extend_expected_output_notes(vec![ - OutputNote::Full(note1.clone()), - OutputNote::Full(note2.clone()), + RawOutputNote::Full(note1.clone()), + RawOutputNote::Full(note2.clone()), ]) .build()? .execute() @@ -303,14 +309,14 @@ async fn unauthenticated_note_converted_to_authenticated() -> anyhow::Result<()> block1 .body() .output_notes() - .any(|(_, note)| note.commitment() == note1.commitment()), + .any(|(_, note)| note.to_commitment() == note1.commitment()), "block 1 should contain note1" ); assert!( block1 .body() .output_notes() - .any(|(_, note)| note.commitment() == note2.commitment()), + .any(|(_, note)| note.to_commitment() == note2.commitment()), "block 1 should contain note2" ); @@ -427,7 +433,7 @@ fn authenticated_note_created_in_same_batch() -> anyhow::Result<()> { let tx1 = MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) - .output_notes(vec![OutputNote::Full(note0.clone())]) + .output_notes(vec![RawOutputNote::Full(note0.clone()).to_output_note().unwrap()]) .build()?; let tx2 = MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) @@ -550,7 +556,11 @@ fn input_and_output_notes_commitment() -> anyhow::Result<()> { MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) .ref_block_commitment(block1.commitment()) .unauthenticated_notes(vec![note4.clone(), note6.clone()]) - .output_notes(vec![OutputNote::Full(note1.clone()), note2.clone(), note3.clone()]) + .output_notes(vec![ + RawOutputNote::Full(note1.clone()).to_output_note().unwrap(), + 
note2.clone(), + note3.clone(), + ]) .build()?; let batch = ProposedBatch::new( @@ -563,7 +573,7 @@ fn input_and_output_notes_commitment() -> anyhow::Result<()> { // We expect note1 to be erased from the input/output notes as it is created and consumed // in the batch. let mut expected_output_notes = [note0, note2, note3]; - // We expect a vector sorted by NoteId (since InputOutputNoteTracker is set up that way). + // We expect a vector sorted by NoteId. expected_output_notes.sort_unstable_by_key(OutputNote::id); assert_eq!(batch.output_notes().len(), 3); @@ -655,13 +665,13 @@ fn circular_note_dependency() -> anyhow::Result<()> { MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) .unauthenticated_notes(vec![note_x.clone()]) - .output_notes(vec![OutputNote::Full(note_y.clone())]) + .output_notes(vec![RawOutputNote::Full(note_y.clone()).to_output_note().unwrap()]) .build()?; let tx2 = MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) .ref_block_commitment(block1.commitment()) .unauthenticated_notes(vec![note_y.clone()]) - .output_notes(vec![OutputNote::Full(note_x.clone())]) + .output_notes(vec![RawOutputNote::Full(note_x.clone()).to_output_note().unwrap()]) .build()?; let batch = ProposedBatch::new( @@ -736,7 +746,7 @@ fn noop_tx_before_state_updating_tx_against_same_account() -> anyhow::Result<()> ) .ref_block_commitment(block1.commitment()) .authenticated_notes(vec![note1]) - .output_notes(vec![OutputNote::Full(note.clone())]) + .output_notes(vec![RawOutputNote::Full(note.clone()).to_output_note().unwrap()]) .build()?; // sanity check @@ -797,7 +807,7 @@ fn noop_tx_after_state_updating_tx_against_same_account() -> anyhow::Result<()> ) .ref_block_commitment(block1.commitment()) .authenticated_notes(vec![note1]) - .output_notes(vec![OutputNote::Full(note.clone())]) + .output_notes(vec![RawOutputNote::Full(note.clone()).to_output_note().unwrap()]) 
.build()?; // sanity check diff --git a/crates/miden-testing/src/kernel_tests/batch/proven_tx_builder.rs b/crates/miden-testing/src/kernel_tests/batch/proven_tx_builder.rs index 95be1c04c5..abf6b8809e 100644 --- a/crates/miden-testing/src/kernel_tests/batch/proven_tx_builder.rs +++ b/crates/miden-testing/src/kernel_tests/batch/proven_tx_builder.rs @@ -3,15 +3,17 @@ use alloc::vec::Vec; use anyhow::Context; use miden_protocol::Word; use miden_protocol::account::AccountId; +use miden_protocol::account::delta::AccountUpdateDetails; use miden_protocol::asset::FungibleAsset; use miden_protocol::block::BlockNumber; use miden_protocol::crypto::merkle::SparseMerklePath; use miden_protocol::note::{Note, NoteInclusionProof, Nullifier}; use miden_protocol::transaction::{ InputNote, + InputNoteCommitment, OutputNote, ProvenTransaction, - ProvenTransactionBuilder, + TxAccountUpdate, }; use miden_protocol::vm::ExecutionProof; @@ -102,21 +104,36 @@ impl MockProvenTxBuilder { /// Builds the [`ProvenTransaction`] and returns potential errors. 
pub fn build(self) -> anyhow::Result { - ProvenTransactionBuilder::new( + let mut input_note_commitments: Vec = self + .input_notes + .unwrap_or_default() + .into_iter() + .map(InputNoteCommitment::from) + .collect(); + + // Add nullifiers as input note commitments + input_note_commitments + .extend(self.nullifiers.unwrap_or_default().into_iter().map(InputNoteCommitment::from)); + + let account_update = TxAccountUpdate::new( self.account_id, self.initial_account_commitment, self.final_account_commitment, Word::empty(), + AccountUpdateDetails::Private, + ) + .context("failed to build account update")?; + + ProvenTransaction::new( + account_update, + input_note_commitments, + self.output_notes.unwrap_or_default(), BlockNumber::from(0), self.ref_block_commitment.unwrap_or_default(), self.fee, self.expiration_block_num, ExecutionProof::new_dummy(), ) - .add_input_notes(self.input_notes.unwrap_or_default()) - .add_input_notes(self.nullifiers.unwrap_or_default()) - .add_output_notes(self.output_notes.unwrap_or_default()) - .build() .context("failed to build proven transaction") } } diff --git a/crates/miden-testing/src/kernel_tests/block/header_errors.rs b/crates/miden-testing/src/kernel_tests/block/header_errors.rs index 78cca44f3f..ffc2e1d2c6 100644 --- a/crates/miden-testing/src/kernel_tests/block/header_errors.rs +++ b/crates/miden-testing/src/kernel_tests/block/header_errors.rs @@ -17,7 +17,12 @@ use miden_protocol::batch::ProvenBatch; use miden_protocol::block::{BlockInputs, BlockNumber, ProposedBlock}; use miden_protocol::errors::{AccountTreeError, NullifierTreeError, ProposedBlockError}; use miden_protocol::note::NoteType; -use miden_protocol::transaction::ProvenTransactionBuilder; +use miden_protocol::transaction::{ + InputNoteCommitment, + OutputNote, + ProvenTransaction, + TxAccountUpdate, +}; use miden_protocol::vm::ExecutionProof; use miden_standards::testing::account_component::{IncrNonceAuthComponent, MockAccountComponent}; use 
miden_standards::testing::mock_account::MockAccountExt; @@ -383,19 +388,25 @@ async fn block_building_fails_on_creating_account_with_duplicate_account_id_pref let [tx0, tx1] = [(id0, [0, 0, 0, 1u32]), (id1, [0, 0, 0, 2u32])].map(|(id, final_state_comm)| { - ProvenTransactionBuilder::new( + let account_update = TxAccountUpdate::new( id, Word::empty(), Word::from(final_state_comm), Word::empty(), + AccountUpdateDetails::Private, + ) + .context("failed to build account update") + .unwrap(); + ProvenTransaction::new( + account_update, + Vec::::new(), + Vec::::new(), genesis_block.block_num(), genesis_block.commitment(), FungibleAsset::mock(500).unwrap_fungible(), BlockNumber::from(u32::MAX), ExecutionProof::new_dummy(), ) - .account_update_details(AccountUpdateDetails::Private) - .build() .context("failed to build proven transaction") .unwrap() }); @@ -428,10 +439,10 @@ async fn block_building_fails_on_creating_account_with_duplicate_account_id_pref let err = block.into_header_and_body().unwrap_err(); - // This should fail when we try to _track_ the same two prefixes in the partial tree. + // This should fail when we try to _insert_ the same two prefixes in the partial tree. 
assert_matches!( err, - ProposedBlockError::AccountWitnessTracking { + ProposedBlockError::AccountIdPrefixDuplicate { source: AccountTreeError::DuplicateIdPrefix { duplicate_prefix } } if duplicate_prefix == id0.prefix() ); diff --git a/crates/miden-testing/src/kernel_tests/block/proposed_block_errors.rs b/crates/miden-testing/src/kernel_tests/block/proposed_block_errors.rs index 397356e8ec..5e537ed9ba 100644 --- a/crates/miden-testing/src/kernel_tests/block/proposed_block_errors.rs +++ b/crates/miden-testing/src/kernel_tests/block/proposed_block_errors.rs @@ -3,7 +3,7 @@ use std::collections::BTreeMap; use std::vec::Vec; use assert_matches::assert_matches; -use miden_processor::crypto::MerklePath; +use miden_processor::crypto::merkle::MerklePath; use miden_protocol::MAX_BATCHES_PER_BLOCK; use miden_protocol::asset::FungibleAsset; use miden_protocol::block::{BlockInputs, BlockNumber, ProposedBlock}; diff --git a/crates/miden-testing/src/kernel_tests/block/proposed_block_success.rs b/crates/miden-testing/src/kernel_tests/block/proposed_block_success.rs index a89f080504..46cd8eb3a6 100644 --- a/crates/miden-testing/src/kernel_tests/block/proposed_block_success.rs +++ b/crates/miden-testing/src/kernel_tests/block/proposed_block_success.rs @@ -4,14 +4,14 @@ use std::vec::Vec; use anyhow::Context; use assert_matches::assert_matches; +use miden_protocol::Felt; use miden_protocol::account::delta::AccountUpdateDetails; use miden_protocol::account::{Account, AccountId, AccountStorageMode}; use miden_protocol::asset::FungibleAsset; use miden_protocol::block::{BlockInputs, ProposedBlock}; use miden_protocol::note::{Note, NoteType}; use miden_protocol::testing::account_id::ACCOUNT_ID_SENDER; -use miden_protocol::transaction::{ExecutedTransaction, OutputNote, TransactionHeader}; -use miden_protocol::{Felt, FieldElement}; +use miden_protocol::transaction::{ExecutedTransaction, RawOutputNote, TransactionHeader}; use 
miden_standards::testing::account_component::MockAccountComponent; use miden_standards::testing::note::NoteBuilder; use miden_tx::LocalTransactionProver; @@ -171,7 +171,7 @@ async fn proposed_block_aggregates_account_state_transition() -> anyhow::Result< assert_matches!(account_update.details(), AccountUpdateDetails::Delta(delta) => { assert_eq!(delta.vault().fungible().num_assets(), 1); - assert_eq!(delta.vault().fungible().amount(&asset.unwrap_fungible().faucet_id()).unwrap(), 300); + assert_eq!(delta.vault().fungible().amount(&asset.unwrap_fungible().vault_key()).unwrap(), 300); }); Ok(()) @@ -279,8 +279,8 @@ async fn noop_tx_and_state_updating_tx_against_same_account_in_same_block() -> a NoteBuilder::new(ACCOUNT_ID_SENDER.try_into().unwrap(), &mut rand::rng()).build()?; let noop_note1 = NoteBuilder::new(ACCOUNT_ID_SENDER.try_into().unwrap(), &mut rand::rng()).build()?; - builder.add_output_note(OutputNote::Full(noop_note0.clone())); - builder.add_output_note(OutputNote::Full(noop_note1.clone())); + builder.add_output_note(RawOutputNote::Full(noop_note0.clone())); + builder.add_output_note(RawOutputNote::Full(noop_note1.clone())); let mut chain = builder.build()?; let noop_tx = generate_conditional_tx(&mut chain, account0.id(), noop_note0, false).await; @@ -333,11 +333,11 @@ async fn generate_conditional_tx( modify_storage: bool, ) -> ExecutedTransaction { let auth_args = [ + Felt::new(97), + Felt::new(98), + Felt::new(99), // increment nonce if modify_storage is true if modify_storage { Felt::ONE } else { Felt::ZERO }, - Felt::new(99), - Felt::new(98), - Felt::new(97), ]; let tx_context = chain diff --git a/crates/miden-testing/src/kernel_tests/tx/mod.rs b/crates/miden-testing/src/kernel_tests/tx/mod.rs index 6585399501..e8fa628883 100644 --- a/crates/miden-testing/src/kernel_tests/tx/mod.rs +++ b/crates/miden-testing/src/kernel_tests/tx/mod.rs @@ -1,6 +1,5 @@ use anyhow::Context; -use miden_processor::ContextId; -use miden_processor::fast::ExecutionOutput; 
+use miden_processor::{ContextId, ExecutionOutput}; use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{Account, AccountId}; use miden_protocol::asset::{Asset, FungibleAsset}; @@ -24,6 +23,7 @@ mod test_array; mod test_asset; mod test_asset_vault; mod test_auth; +mod test_callbacks; mod test_epilogue; mod test_faucet; mod test_fee; @@ -54,12 +54,8 @@ pub trait ExecutionOutputExt { /// Reads an element from the stack. fn get_stack_element(&self, idx: usize) -> Felt; - /// Reads a [`Word`] from the stack in big-endian (reversed) order. - fn get_stack_word_be(&self, index: usize) -> Word; - - /// Reads a [`Word`] from the stack in little-endian (memory) order. - #[allow(dead_code)] - fn get_stack_word_le(&self, index: usize) -> Word; + /// Reads a [`Word`] from the stack in little-endian order. + fn get_stack_word(&self, index: usize) -> Word; /// Reads the [`Word`] of the input note's memory identified by the index at the provided /// `offset`. @@ -72,10 +68,9 @@ impl ExecutionOutputExt for ExecutionOutput { fn get_kernel_mem_word(&self, addr: u32) -> Word { let tx_kernel_context = ContextId::root(); let clk = 0u32; - let err_ctx = (); self.memory - .read_word(tx_kernel_context, Felt::from(addr), clk.into(), &err_ctx) + .read_word(tx_kernel_context, Felt::from(addr), clk.into()) .expect("expected address to be word-aligned") } @@ -83,20 +78,15 @@ impl ExecutionOutputExt for ExecutionOutput { *self.stack.get(index).expect("index must be in bounds") } - fn get_stack_word_be(&self, index: usize) -> Word { - self.stack.get_stack_word_be(index).expect("index must be in bounds") - } - - fn get_stack_word_le(&self, index: usize) -> Word { - self.stack.get_stack_word_le(index).expect("index must be in bounds") + fn get_stack_word(&self, index: usize) -> Word { + self.stack.get_word(index).expect("index must be in bounds") } fn get_kernel_mem_element(&self, addr: u32) -> Felt { let tx_kernel_context = ContextId::root(); - let err_ctx = (); self.memory 
- .read_element(tx_kernel_context, Felt::from(addr), &err_ctx) + .read_element(tx_kernel_context, Felt::from(addr)) .expect("address converted from u32 should be in bounds") } } @@ -148,7 +138,9 @@ fn setup_test() -> anyhow::Result { ); let account = builder.add_existing_wallet_with_assets( - crate::Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + crate::Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, [fungible_asset_0_double_amount, fungible_asset_1], )?; diff --git a/crates/miden-testing/src/kernel_tests/tx/test_account.rs b/crates/miden-testing/src/kernel_tests/tx/test_account.rs index 8efb0eb8f3..0901eee831 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_account.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_account.rs @@ -1,10 +1,13 @@ +use alloc::string::String; use alloc::sync::Arc; use alloc::vec::Vec; use std::collections::BTreeMap; use anyhow::Context; use assert_matches::assert_matches; +use miden_crypto::rand::test_utils::rand_value; use miden_processor::{ExecutionError, Word}; +use miden_protocol::LexicographicWord; use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::component::AccountComponentMetadata; use miden_protocol::account::delta::AccountUpdateDetails; @@ -18,6 +21,7 @@ use miden_protocol::account::{ AccountStorageMode, AccountType, StorageMap, + StorageMapKey, StorageSlot, StorageSlotContent, StorageSlotDelta, @@ -28,7 +32,7 @@ use miden_protocol::account::{ use miden_protocol::assembly::diagnostics::NamedSource; use miden_protocol::assembly::diagnostics::reporting::PrintDiagnostic; use miden_protocol::assembly::{DefaultSourceManager, Library}; -use miden_protocol::asset::{Asset, FungibleAsset}; +use miden_protocol::asset::{Asset, AssetCallbacks, FungibleAsset}; use miden_protocol::errors::tx_kernel::{ ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO, ERR_ACCOUNT_ID_SUFFIX_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO, @@ -38,6 +42,7 @@ use 
miden_protocol::errors::tx_kernel::{ ERR_ACCOUNT_NONCE_CAN_ONLY_BE_INCREMENTED_ONCE, ERR_ACCOUNT_UNKNOWN_STORAGE_SLOT_NAME, }; +use miden_protocol::field::PrimeField64; use miden_protocol::note::NoteType; use miden_protocol::testing::account_id::{ ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET, @@ -48,16 +53,15 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_SENDER, }; use miden_protocol::testing::storage::{MOCK_MAP_SLOT, MOCK_VALUE_SLOT0, MOCK_VALUE_SLOT1}; -use miden_protocol::transaction::{OutputNote, TransactionKernel}; +use miden_protocol::transaction::{RawOutputNote, TransactionKernel}; use miden_protocol::utils::sync::LazyLock; -use miden_protocol::{LexicographicWord, StarkField}; +use miden_standards::account::faucets::BasicFungibleFaucet; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::account_component::MockAccountComponent; use miden_standards::testing::mock_account::MockAccountExt; use miden_tx::LocalTransactionProver; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha20Rng; -use winter_rand_utils::rand_value; use super::{Felt, StackInputs, ZERO}; use crate::executor::CodeExecutor; @@ -81,7 +85,7 @@ pub async fn compute_commitment() -> anyhow::Result<()> { // Precompute a commitment to a changed account so we can assert it during tx script execution. 
let mut account_clone = account.clone(); - let key = Word::from([1, 2, 3, 4u32]); + let key = StorageMapKey::from_array([1, 2, 3, 4]); let value = Word::from([2, 3, 4, 5u32]); let mock_map_slot = &*MOCK_MAP_SLOT; account_clone.storage_mut().set_map_item(mock_map_slot, key, value).unwrap(); @@ -116,7 +120,7 @@ pub async fn compute_commitment() -> anyhow::Result<()> { push.{value} push.{key} push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE, pad(7)] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE, pad(7)] call.mock_account::set_map_item dropw dropw dropw dropw # => [STORAGE_COMMITMENT0] @@ -194,12 +198,12 @@ async fn test_account_type() -> anyhow::Result<()> { ); let exec_output = CodeExecutor::with_default_host() - .stack_inputs(StackInputs::new(vec![account_id.prefix().as_felt()])?) + .stack_inputs(StackInputs::new(&[account_id.prefix().as_felt()])?) .run(&code) .await?; let type_matches = account_id.account_type() == expected_type; - let expected_result = Felt::from(type_matches); + let expected_result = if type_matches { Felt::ONE } else { Felt::ZERO }; has_type |= type_matches; assert_eq!( @@ -251,8 +255,8 @@ async fn test_account_validate_id() -> anyhow::Result<()> { for (account_id, expected_error) in test_cases.iter() { // Manually split the account ID into prefix and suffix since we can't use AccountId methods // on invalid ids. 
- let prefix = Felt::try_from((account_id / (1u128 << 64)) as u64).unwrap(); - let suffix = Felt::try_from((account_id % (1u128 << 64)) as u64).unwrap(); + let prefix = Felt::try_from((account_id / (1u128 << 64)) as u64)?; + let suffix = Felt::try_from((account_id % (1u128 << 64)) as u64)?; let code = " use $kernel::account_id @@ -263,7 +267,7 @@ async fn test_account_validate_id() -> anyhow::Result<()> { "; let result = CodeExecutor::with_default_host() - .stack_inputs(StackInputs::new(vec![suffix, prefix]).unwrap()) + .stack_inputs(StackInputs::new(&[suffix, prefix]).unwrap()) .run(code) .await; @@ -272,7 +276,17 @@ async fn test_account_validate_id() -> anyhow::Result<()> { (Ok(_), Some(err)) => { anyhow::bail!("expected error {err} but validation was successful") }, - (Err(ExecutionError::FailedAssertion { err_code, err_msg, .. }), Some(err)) => { + ( + Err(ExecutionError::OperationError { + err: + miden_processor::operation::OperationError::FailedAssertion { + err_code, + err_msg, + }, + .. 
+ }), + Some(err), + ) => { if err_code != err.code() { anyhow::bail!( "actual error \"{}\" (code: {err_code}) did not match expected error {err}", @@ -389,7 +403,7 @@ async fn test_get_item() -> anyhow::Result<()> { # push the account storage item index push.SLOT_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix] + # => [slot_id_suffix, slot_id_prefix] # assert the item value is correct exec.account::get_item @@ -455,7 +469,7 @@ async fn test_get_map_item() -> anyhow::Result<()> { } #[tokio::test] -async fn test_get_storage_slot_type() -> anyhow::Result<()> { +async fn test_get_native_storage_slot_type() -> anyhow::Result<()> { for slot_name in [ AccountStorage::mock_value_slot0().name(), AccountStorage::mock_value_slot1().name(), @@ -483,7 +497,7 @@ async fn test_get_storage_slot_type() -> anyhow::Result<()> { push.{slot_idx} # get the type of the respective storage slot - exec.account::get_storage_slot_type + exec.account::get_native_storage_slot_type # truncate the stack swap drop @@ -496,28 +510,16 @@ async fn test_get_storage_slot_type() -> anyhow::Result<()> { assert_eq!( slot.slot_type(), StorageSlotType::try_from( - u8::try_from(exec_output.get_stack_element(0).as_int()).unwrap() + u8::try_from(exec_output.get_stack_element(0).as_canonical_u64()).unwrap() ) .unwrap() ); assert_eq!(exec_output.get_stack_element(1), ZERO, "the rest of the stack is empty"); assert_eq!(exec_output.get_stack_element(2), ZERO, "the rest of the stack is empty"); assert_eq!(exec_output.get_stack_element(3), ZERO, "the rest of the stack is empty"); - assert_eq!( - exec_output.get_stack_word_be(4), - Word::empty(), - "the rest of the stack is empty" - ); - assert_eq!( - exec_output.get_stack_word_be(8), - Word::empty(), - "the rest of the stack is empty" - ); - assert_eq!( - exec_output.get_stack_word_be(12), - Word::empty(), - "the rest of the stack is empty" - ); + assert_eq!(exec_output.get_stack_word(4), Word::empty(), "the rest of the stack is empty"); + 
assert_eq!(exec_output.get_stack_word(8), Word::empty(), "the rest of the stack is empty"); + assert_eq!(exec_output.get_stack_word(12), Word::empty(), "the rest of the stack is empty"); } Ok(()) @@ -533,8 +535,9 @@ async fn test_account_get_item_fails_on_unknown_slot() -> anyhow::Result<()> { let account_empty_storage = builder.add_existing_mock_account(Auth::IncrNonce)?; assert_eq!(account_empty_storage.storage().num_slots(), 0); - let account_non_empty_storage = builder - .add_existing_mock_account(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let account_non_empty_storage = builder.add_existing_mock_account(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; assert_eq!(account_non_empty_storage.storage().num_slots(), 2); let chain = builder.build()?; @@ -608,8 +611,8 @@ async fn test_is_slot_id_lt() -> anyhow::Result<()> { use $kernel::account begin - push.{curr_suffix}.{curr_prefix}.{prev_suffix}.{prev_prefix} - # => [prev_slot_id_prefix, prev_slot_id_suffix, curr_slot_id_prefix, curr_slot_id_suffix] + push.{curr_prefix}.{curr_suffix}.{prev_prefix}.{prev_suffix} + # => [prev_slot_id_suffix, prev_slot_id_prefix, curr_slot_id_suffix, curr_slot_id_prefix] exec.account::is_slot_id_lt # => [is_slot_id_lt] @@ -653,7 +656,7 @@ async fn test_set_item() -> anyhow::Result<()> { # set the storage item push.{new_value} push.MOCK_VALUE_SLOT0[0..2] - # => [slot_id_prefix, slot_id_suffix, NEW_VALUE] + # => [slot_id_suffix, slot_id_prefix, NEW_VALUE] exec.account::set_item @@ -663,7 +666,7 @@ async fn test_set_item() -> anyhow::Result<()> { # assert new value has been correctly set push.MOCK_VALUE_SLOT0[0..2] - # => [slot_id_prefix, slot_id_suffix] + # => [slot_id_suffix, slot_id_prefix] exec.account::get_item push.{new_value} @@ -679,8 +682,10 @@ async fn test_set_item() -> anyhow::Result<()> { #[tokio::test] async fn test_set_map_item() -> anyhow::Result<()> { - let (new_key, new_value) = - (Word::from([109, 110, 111, 112u32]), 
Word::from([9, 10, 11, 12u32])); + let (new_key, new_value) = ( + StorageMapKey::from_array([109, 110, 111, 112u32]), + Word::from([9, 10, 11, 12u32]), + ); let slot = AccountStorage::mock_map_slot(); let account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) @@ -711,11 +716,11 @@ async fn test_set_map_item() -> anyhow::Result<()> { # double check that the storage slot is indeed the new map push.SLOT_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, OLD_VALUE] + # => [slot_id_suffix, slot_id_prefix, OLD_VALUE] # pad the stack repeat.14 push.0 movdn.2 end - # => [slot_id_prefix, slot_id_suffix, pad(14), OLD_VALUE] + # => [slot_id_suffix, slot_id_prefix, pad(14), OLD_VALUE] call.mock_account::get_item # => [MAP_ROOT, pad(12), OLD_VALUE] @@ -739,7 +744,7 @@ async fn test_set_map_item() -> anyhow::Result<()> { assert_eq!( new_storage_map.root(), - exec_output.get_stack_word_be(0), + exec_output.get_stack_word(0), "get_item should return the updated root", ); @@ -749,7 +754,7 @@ async fn test_set_map_item() -> anyhow::Result<()> { }; assert_eq!( old_value_for_key, - exec_output.get_stack_word_be(4), + exec_output.get_stack_word(4), "set_map_item must return the old value for the key (empty word for new key)", ); @@ -816,16 +821,16 @@ async fn test_compute_storage_commitment() -> anyhow::Result<()> { let init_storage_commitment = account_storage.to_commitment(); let mock_value_slot0 = &*MOCK_VALUE_SLOT0; + let value_slot0 = Word::from([9, 10, 11, 12u32]); + let mock_map_slot = &*MOCK_MAP_SLOT; + let map_key = StorageMapKey::from_array([101, 102, 103, 104u32]); + let map_value = Word::from([5, 6, 7, 8u32]); - account_storage.set_item(mock_value_slot0, [9, 10, 11, 12].map(Felt::new).into())?; + account_storage.set_item(mock_value_slot0, value_slot0)?; let storage_commitment_value = account_storage.to_commitment(); - account_storage.set_map_item( - mock_map_slot, - [101, 102, 103, 104].map(Felt::new).into(), - [5, 6, 7, 8].map(Felt::new).into(), - )?; + 
account_storage.set_map_item(mock_map_slot, map_key, map_value)?; let storage_commitment_map = account_storage.to_commitment(); let code = format!( @@ -845,7 +850,7 @@ async fn test_compute_storage_commitment() -> anyhow::Result<()> { assert_eqw.err="storage commitment at the beginning of the transaction is not equal to the expected one" # update the value storage slot - push.9.10.11.12 + push.{value_slot0} push.MOCK_VALUE_SLOT0[0..2] call.mock_account::set_item dropw drop # => [] @@ -862,9 +867,10 @@ async fn test_compute_storage_commitment() -> anyhow::Result<()> { assert_eqw.err="storage commitment should remain the same" # update the map storage slot - push.5.6.7.8.101.102.103.104 + push.{map_value} + push.{map_key} push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] call.mock_account::set_map_item dropw dropw # => [] @@ -896,7 +902,7 @@ async fn prove_account_creation_with_non_empty_storage() -> anyhow::Result<()> { let slot1 = StorageSlot::with_value(slot_name1.clone(), Word::from([10, 20, 30, 40u32])); let mut map_entries = Vec::new(); for _ in 0..10 { - map_entries.push((rand_value::(), rand_value::())); + map_entries.push((StorageMapKey::from_raw(rand_value::()), rand_value::())); } let map_slot = StorageSlot::with_map(slot_name2.clone(), StorageMap::with_entries(map_entries.clone())?); @@ -946,7 +952,7 @@ async fn prove_account_creation_with_non_empty_storage() -> anyhow::Result<()> { assert!(tx.account_delta().vault().is_empty()); assert_eq!(tx.final_account().nonce(), Felt::new(1)); - let proven_tx = LocalTransactionProver::default().prove(tx.clone())?; + let proven_tx = LocalTransactionProver::default().prove(tx.clone()).await?; // The delta should be present on the proven tx. 
let AccountUpdateDetails::Delta(delta) = proven_tx.account_update().details() else { @@ -1006,8 +1012,10 @@ async fn test_get_vault_root() -> anyhow::Result<()> { exec.prologue::prepare_transaction # add an asset to the account - push.{fungible_asset} - call.mock_account::add_asset dropw + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} + call.mock_account::add_asset + dropw dropw # => [] # get the current vault root @@ -1016,7 +1024,8 @@ async fn test_get_vault_root() -> anyhow::Result<()> { assert_eqw.err="vault root mismatch" end "#, - fungible_asset = Word::from(&fungible_asset), + FUNGIBLE_ASSET_VALUE = fungible_asset.to_value_word(), + FUNGIBLE_ASSET_KEY = fungible_asset.to_key_word(), expected_vault_root = &account.vault().root(), ); tx_context.execute_code(&code).await?; @@ -1046,7 +1055,9 @@ async fn test_get_init_balance_addition() -> anyhow::Result<()> { FungibleAsset::new(faucet_existing_asset, 10).context("fungible_asset_0 is invalid")?, ); let account = builder.add_existing_wallet_with_assets( - crate::Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + crate::Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, [fungible_asset_for_account], )?; @@ -1088,17 +1099,17 @@ async fn test_get_init_balance_addition() -> anyhow::Result<()> { begin # push faucet ID prefix and suffix - push.{suffix}.{prefix} - # => [faucet_id_prefix, faucet_id_suffix] + push.{prefix}.{suffix} + # => [faucet_id_suffix, faucet_id_prefix] # get the current asset balance dup.1 dup.1 exec.active_account::get_balance - # => [final_balance, faucet_id_prefix, faucet_id_suffix] + # => [final_balance, faucet_id_suffix, faucet_id_prefix] # assert final balance is correct push.{final_balance} assert_eq.err="final balance is incorrect" - # => [faucet_id_prefix, faucet_id_suffix] + # => [faucet_id_suffix, faucet_id_prefix] # get the initial asset balance exec.active_account::get_initial_balance @@ -1112,7 +1123,7 @@ async fn test_get_init_balance_addition() 
-> anyhow::Result<()> { suffix = faucet_existing_asset.suffix(), prefix = faucet_existing_asset.prefix().as_felt(), final_balance = - initial_balance + fungible_asset_for_note_existing.unwrap_fungible().amount(), + initial_balance + fungible_asset_for_note_existing.unwrap_fungible().amount().inner(), ); let tx_script = CodeBuilder::default().compile_tx_script(add_existing_source)?; @@ -1142,17 +1153,17 @@ async fn test_get_init_balance_addition() -> anyhow::Result<()> { begin # push faucet ID prefix and suffix - push.{suffix}.{prefix} - # => [faucet_id_prefix, faucet_id_suffix] + push.{prefix}.{suffix} + # => [faucet_id_suffix, faucet_id_prefix] # get the current asset balance dup.1 dup.1 exec.active_account::get_balance - # => [final_balance, faucet_id_prefix, faucet_id_suffix] + # => [final_balance, faucet_id_suffix, faucet_id_prefix] # assert final balance is correct push.{final_balance} assert_eq.err="final balance is incorrect" - # => [faucet_id_prefix, faucet_id_suffix] + # => [faucet_id_suffix, faucet_id_prefix] # get the initial asset balance exec.active_account::get_initial_balance @@ -1165,7 +1176,8 @@ async fn test_get_init_balance_addition() -> anyhow::Result<()> { "#, suffix = faucet_new_asset.suffix(), prefix = faucet_new_asset.prefix().as_felt(), - final_balance = initial_balance + fungible_asset_for_note_new.unwrap_fungible().amount(), + final_balance = + initial_balance + fungible_asset_for_note_new.unwrap_fungible().amount().inner(), ); let tx_script = CodeBuilder::default().compile_tx_script(add_new_source)?; @@ -1196,7 +1208,9 @@ async fn test_get_init_balance_subtraction() -> anyhow::Result<()> { FungibleAsset::new(faucet_existing_asset, 10).context("fungible_asset_0 is invalid")?, ); let account = builder.add_existing_wallet_with_assets( - crate::Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + crate::Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, [fungible_asset_for_account], )?; @@ -1221,42 +1235,28 @@ 
async fn test_get_init_balance_subtraction() -> anyhow::Result<()> { use miden::standards::wallets::basic->wallet use mock::util - # Inputs: [ASSET, note_idx] - # Outputs: [ASSET, note_idx] - proc move_asset_to_note - # pad the stack before call - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [ASSET, note_idx, pad(11)] - - call.wallet::move_asset_to_note - # => [ASSET, note_idx, pad(11)] - - # remove excess PADs from the stack - swapdw dropw dropw swapw movdn.7 drop drop drop - # => [ASSET, note_idx] - end - begin # create random note and move the asset into it exec.util::create_default_note # => [note_idx] - push.{REMOVED_ASSET} - exec.move_asset_to_note dropw drop + push.{REMOVED_ASSET_VALUE} + push.{REMOVED_ASSET_KEY} + exec.util::move_asset_to_note # => [] # push faucet ID prefix and suffix - push.{suffix}.{prefix} - # => [faucet_id_prefix, faucet_id_suffix] + push.{prefix}.{suffix} + # => [faucet_id_suffix, faucet_id_prefix] # get the current asset balance dup.1 dup.1 exec.active_account::get_balance - # => [final_balance, faucet_id_prefix, faucet_id_suffix] + # => [final_balance, faucet_id_suffix, faucet_id_prefix] # assert final balance is correct push.{final_balance} assert_eq.err="final balance is incorrect" - # => [faucet_id_prefix, faucet_id_suffix] + # => [faucet_id_suffix, faucet_id_prefix] # get the initial asset balance exec.active_account::get_initial_balance @@ -1267,11 +1267,12 @@ async fn test_get_init_balance_subtraction() -> anyhow::Result<()> { assert_eq.err="initial balance is incorrect" end "#, - REMOVED_ASSET = Word::from(fungible_asset_for_note_existing), + REMOVED_ASSET_KEY = fungible_asset_for_note_existing.to_key_word(), + REMOVED_ASSET_VALUE = fungible_asset_for_note_existing.to_value_word(), suffix = faucet_existing_asset.suffix(), prefix = faucet_existing_asset.prefix().as_felt(), final_balance = - initial_balance - fungible_asset_for_note_existing.unwrap_fungible().amount(), + initial_balance - 
fungible_asset_for_note_existing.unwrap_fungible().amount().inner(), ); let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(remove_existing_source)?; @@ -1279,7 +1280,7 @@ async fn test_get_init_balance_subtraction() -> anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(TxContextInput::AccountId(account.id()), &[], &[])? .tx_script(tx_script) - .extend_expected_output_notes(vec![OutputNote::Full(expected_output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(expected_output_note)]) .build()?; tx_context.execute().await?; @@ -1303,7 +1304,9 @@ async fn test_get_init_asset() -> anyhow::Result<()> { FungibleAsset::new(faucet_existing_asset, 10).context("fungible_asset_0 is invalid")?, ); let account = builder.add_existing_wallet_with_assets( - crate::Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + crate::Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, [fungible_asset_for_account], )?; @@ -1332,13 +1335,14 @@ async fn test_get_init_asset() -> anyhow::Result<()> { exec.util::create_default_note # => [note_idx] - push.{REMOVED_ASSET} - call.wallet::move_asset_to_note dropw drop + push.{REMOVED_ASSET_VALUE} + push.{ASSET_KEY} + exec.util::move_asset_to_note # => [] # get the current asset push.{ASSET_KEY} exec.active_account::get_asset - # => [ASSET] + # => [ASSET_VALUE] push.{FINAL_ASSET} assert_eqw.err="final asset is incorrect" @@ -1348,14 +1352,14 @@ async fn test_get_init_asset() -> anyhow::Result<()> { push.{ASSET_KEY} exec.active_account::get_initial_asset # => [INITIAL_ASSET] - push.{INITIAL_ASSET} + push.{INITIAL_ASSET_VALUE} assert_eqw.err="initial asset is incorrect" end "#, - ASSET_KEY = fungible_asset_for_note_existing.vault_key(), - REMOVED_ASSET = Word::from(fungible_asset_for_note_existing), - INITIAL_ASSET = Word::from(fungible_asset_for_account), - FINAL_ASSET = Word::from(final_asset), + ASSET_KEY = fungible_asset_for_note_existing.to_key_word(), + REMOVED_ASSET_VALUE = 
fungible_asset_for_note_existing.to_value_word(), + INITIAL_ASSET_VALUE = fungible_asset_for_account.to_value_word(), + FINAL_ASSET = final_asset.to_value_word(), ); let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(remove_existing_source)?; @@ -1363,7 +1367,7 @@ async fn test_get_init_asset() -> anyhow::Result<()> { mock_chain .build_tx_context(TxContextInput::AccountId(account.id()), &[], &[])? .tx_script(tx_script) - .extend_expected_output_notes(vec![OutputNote::Full(expected_output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(expected_output_note)]) .build()? .execute() .await?; @@ -1504,6 +1508,7 @@ async fn test_was_procedure_called() -> anyhow::Result<()> { /// `tx script -> account code -> external library` #[tokio::test] async fn transaction_executor_account_code_using_custom_library() -> anyhow::Result<()> { + let slot_value = Word::from([2, 3, 4, 5u32]); let external_library_code = format!( r#" use miden::protocol::native_account @@ -1511,7 +1516,7 @@ async fn transaction_executor_account_code_using_custom_library() -> anyhow::Res const MOCK_VALUE_SLOT0 = word("{mock_value_slot0}") pub proc external_setter - push.2.3.4.5 + push.{slot_value} push.MOCK_VALUE_SLOT0[0..2] exec.native_account::set_item dropw dropw @@ -1585,7 +1590,7 @@ async fn transaction_executor_account_code_using_custom_library() -> anyhow::Res assert_eq!(executed_tx.account_delta().storage().values().count(), 1); assert_eq!( executed_tx.account_delta().storage().get(&MOCK_VALUE_SLOT0).unwrap(), - &StorageSlotDelta::Value(Word::from([2, 3, 4, 5u32])), + &StorageSlotDelta::Value(slot_value), ); Ok(()) } @@ -1596,6 +1601,7 @@ async fn incrementing_nonce_twice_fails() -> anyhow::Result<()> { let source_code = " use miden::protocol::native_account + @auth_script pub proc auth_incr_nonce_twice exec.native_account::incr_nonce drop exec.native_account::incr_nonce drop @@ -1674,6 +1680,65 @@ async fn test_has_procedure() -> anyhow::Result<()> { Ok(()) } 
+/// Tests that the `has_callbacks` faucet procedure correctly reports whether a faucet defines +/// callbacks. +/// +/// - `with_callbacks`: callback slot has a non-empty value -> returns 1 +/// - `with_empty_callback`: callback slot exists but value is the empty word -> returns 0 +/// - `without_callbacks`: no callback slot at all -> returns 0 +#[rstest::rstest] +#[case::with_callbacks( + vec![StorageSlot::with_value( + AssetCallbacks::on_before_asset_added_to_account_slot().clone(), + Word::from([1, 2, 3, 4u32]), + )], + true, +)] +#[case::with_empty_callback( + vec![StorageSlot::with_empty_value( + AssetCallbacks::on_before_asset_added_to_account_slot().clone(), + )], + false, +)] +#[case::without_callbacks(vec![], false)] +#[tokio::test] +async fn test_faucet_has_callbacks( + #[case] callback_slots: Vec, + #[case] expected_has_callbacks: bool, +) -> anyhow::Result<()> { + let basic_faucet = BasicFungibleFaucet::new("CBK".try_into()?, 8, Felt::new(1_000_000))?; + + let account = AccountBuilder::new([1u8; 32]) + .storage_mode(AccountStorageMode::Public) + .account_type(AccountType::FungibleFaucet) + .with_component(basic_faucet) + .with_component(MockAccountComponent::with_slots(callback_slots)) + .with_auth_component(Auth::IncrNonce) + .build_existing()?; + + let tx_script_code = format!( + r#" + use miden::protocol::faucet + + begin + exec.faucet::has_callbacks + push.{has_callbacks} + assert_eq.err="has_callbacks returned unexpected value" + end + "#, + has_callbacks = u8::from(expected_has_callbacks) + ); + let tx_script = CodeBuilder::default().compile_tx_script(&tx_script_code)?; + + TransactionContextBuilder::new(account) + .tx_script(tx_script) + .build()? 
+ .execute() + .await?; + + Ok(()) +} + // ACCOUNT INITIAL STORAGE TESTS // ================================================================================================ @@ -1806,6 +1871,86 @@ async fn test_get_initial_map_item() -> anyhow::Result<()> { Ok(()) } +/// Tests that `get_initial_item` returns the original slot values and `get_item` returns updated +/// values after modification, for all possible storage slot indices. +#[tokio::test] +async fn test_get_item_and_get_initial_item_for_all_slots() -> anyhow::Result<()> { + // Build storage slots for all valid indices. + let slots: Vec = (0..AccountStorage::MAX_NUM_STORAGE_SLOTS as u32) + .map(|index| { + StorageSlot::with_value( + StorageSlotName::mock(index as usize), + Word::from([0, 0, 0, index]), + ) + }) + .collect(); + + let account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) + .with_auth_component(Auth::IncrNonce) + .with_component(MockAccountComponent::with_slots(slots.clone())) + .build_existing() + .unwrap(); + + let tx_context = TransactionContextBuilder::new(account).build().unwrap(); + + // Build MASM code that, for each slot: + // 1. Sets a new value [index, 0, 0, 0] + // 2. Asserts get_initial_item returns the original value [0, 0, 0, index] + // 3. Asserts get_item returns the new value [index, 0, 0, 0] + let mut slot_constants = String::new(); + let mut slot_operations = String::new(); + + for (index, slot) in slots.iter().enumerate() { + let slot_name = slot.name(); + let initial_value = slot.value(); + // Use a different format than the initial value (index at word position 0). 
+ let new_value = Word::from([index as u32, 0, 0, 0]); + let const_name = format!("SLOT_{index}"); + + slot_constants.push_str(&format!("const {const_name} = word(\"{slot_name}\")\n")); + + slot_operations.push_str(&format!( + r#" + # slot {index}: set new value + push.{new_value} + push.{const_name}[0..2] + call.mock_account::set_item dropw drop drop + + # slot {index}: assert get_initial_item returns original value + push.{const_name}[0..2] + exec.account::get_initial_item + push.{initial_value} + assert_eqw.err="slot {index}: initial value mismatch" + + # slot {index}: assert get_item returns the new value + push.{const_name}[0..2] + exec.account::get_item + push.{new_value} + assert_eqw.err="slot {index}: current value mismatch" + "#, + )); + } + + let code = format!( + r#" + use $kernel::account + use $kernel::prologue + use mock::account->mock_account + + {slot_constants} + + begin + exec.prologue::prepare_transaction + {slot_operations} + end + "#, + ); + + tx_context.execute_code(&code).await?; + + Ok(()) +} + /// Tests that incrementing the account nonce fails if it would overflow the field. #[tokio::test] async fn incrementing_nonce_overflow_fails() -> anyhow::Result<()> { @@ -1816,7 +1961,7 @@ async fn incrementing_nonce_overflow_fails() -> anyhow::Result<()> { .context("failed to build account")?; // Increment the nonce to the maximum felt value. The nonce is already 1, so we increment by // modulus - 2. 
- account.increment_nonce(Felt::new(Felt::MODULUS - 2))?; + account.increment_nonce(Felt::new(Felt::ORDER_U64 - 2))?; let result = TransactionContextBuilder::new(account).build()?.execute().await; @@ -1872,7 +2017,7 @@ async fn merging_components_with_same_mast_root_succeeds() -> anyhow::Result<()> end pub proc set_slot_content - push.5.6.7.8 + push.[5,6,7,8] push.TEST_SLOT_NAME[0..2] exec.native_account::set_item swapw dropw @@ -1915,7 +2060,8 @@ async fn merging_components_with_same_mast_root_succeeds() -> anyhow::Result<()> } } - let slot = StorageSlot::with_value(TEST_SLOT_NAME.clone(), Word::from([1, 2, 3, 4u32])); + let slot_content1 = Word::from([1, 2, 3, 4u32]); + let slot = StorageSlot::with_value(TEST_SLOT_NAME.clone(), slot_content1); let account = AccountBuilder::new([42; 32]) .with_auth_component(Auth::IncrNonce) @@ -1924,22 +2070,24 @@ async fn merging_components_with_same_mast_root_succeeds() -> anyhow::Result<()> .build() .context("failed to build account")?; - let tx_script = r#" + let tx_script = format!( + r#" use component1::interface->comp1_interface use component2::interface->comp2_interface begin call.comp1_interface::get_slot_content - push.1.2.3.4 + push.{slot_content1} assert_eqw.err="failed to get slot content1" call.comp2_interface::set_slot_content call.comp2_interface::get_slot_content - push.5.6.7.8 + push.[5,6,7,8] assert_eqw.err="failed to get slot content2" end - "#; + "# + ); let tx_script = CodeBuilder::default() .with_dynamically_linked_library(COMPONENT_1_LIBRARY.clone())? 
diff --git a/crates/miden-testing/src/kernel_tests/tx/test_account_delta.rs b/crates/miden-testing/src/kernel_tests/tx/test_account_delta.rs index 4c574a6bf8..a872bcd900 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_account_delta.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_account_delta.rs @@ -3,6 +3,7 @@ use std::collections::BTreeMap; use std::string::String; use anyhow::Context; +use miden_crypto::rand::test_utils::rand_value; use miden_protocol::account::delta::AccountUpdateDetails; use miden_protocol::account::{ Account, @@ -13,11 +14,18 @@ use miden_protocol::account::{ AccountStorageMode, AccountType, StorageMap, + StorageMapKey, StorageSlot, StorageSlotDelta, StorageSlotName, }; -use miden_protocol::asset::{Asset, AssetVault, FungibleAsset, NonFungibleAsset}; +use miden_protocol::asset::{ + Asset, + AssetVault, + FungibleAsset, + NonFungibleAsset, + NonFungibleAssetDetails, +}; use miden_protocol::note::{Note, NoteTag, NoteType}; use miden_protocol::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1, @@ -27,7 +35,6 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_SENDER, AccountIdBuilder, }; -use miden_protocol::testing::asset::NonFungibleAssetBuilder; use miden_protocol::testing::constants::{ CONSUMED_ASSET_1_AMOUNT, CONSUMED_ASSET_3_AMOUNT, @@ -37,13 +44,12 @@ use miden_protocol::testing::constants::{ }; use miden_protocol::testing::storage::{MOCK_MAP_SLOT, MOCK_VALUE_SLOT0}; use miden_protocol::transaction::TransactionScript; -use miden_protocol::{EMPTY_WORD, Felt, FieldElement, LexicographicWord, Word, ZERO}; +use miden_protocol::{EMPTY_WORD, Felt, LexicographicWord, Word, ZERO}; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::account_component::MockAccountComponent; use miden_tx::LocalTransactionProver; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha20Rng; -use winter_rand_utils::rand_value; use crate::utils::create_public_p2any_note; use crate::{Auth, MockChain, 
TransactionContextBuilder}; @@ -147,37 +153,37 @@ async fn storage_delta_for_value_slots() -> anyhow::Result<()> { begin push.{slot_0_tmp_value} push.SLOT_0_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] exec.set_item # => [] push.{slot_0_final_value} push.SLOT_0_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] exec.set_item # => [] push.{slot_1_final_value} push.SLOT_1_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] exec.set_item # => [] push.{slot_2_final_value} push.SLOT_2_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] exec.set_item # => [] push.{slot_3_tmp_value} push.SLOT_3_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] exec.set_item # => [] push.{slot_3_final_value} push.SLOT_3_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] exec.set_item # => [] end @@ -223,12 +229,12 @@ async fn storage_delta_for_value_slots() -> anyhow::Result<()> { async fn storage_delta_for_map_slots() -> anyhow::Result<()> { // Test with random keys to make sure the ordering in the MASM and Rust implementations // matches. 
- let key0 = rand_value::(); - let key1 = rand_value::(); - let key2 = rand_value::(); - let key3 = rand_value::(); - let key4 = rand_value::(); - let key5 = rand_value::(); + let key0 = StorageMapKey::from_raw(rand_value::()); + let key1 = StorageMapKey::from_raw(rand_value::()); + let key2 = StorageMapKey::from_raw(rand_value::()); + let key3 = StorageMapKey::from_raw(rand_value::()); + let key4 = StorageMapKey::from_raw(rand_value::()); + let key5 = StorageMapKey::from_raw(rand_value::()); let key0_init_value = EMPTY_WORD; let key1_init_value = EMPTY_WORD; @@ -285,55 +291,55 @@ async fn storage_delta_for_map_slots() -> anyhow::Result<()> { begin push.{key0_final_value} push.{key0} push.SLOT_0_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key1_tmp_value} push.{key1} push.SLOT_0_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key1_final_value} push.{key1} push.SLOT_0_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key2_final_value} push.{key2} push.SLOT_1_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key3_final_value} push.{key3} push.SLOT_1_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key4_tmp_value} push.{key4} push.SLOT_1_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key4_final_value} push.{key4} push.SLOT_1_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key5_tmp_value} 
push.{key5} push.SLOT_2_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key5_final_value} push.{key5} push.SLOT_2_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] end @@ -427,20 +433,31 @@ async fn fungible_asset_delta() -> anyhow::Result<()> { let tx_script = parse_tx_script(format!( " begin - push.{asset0} exec.create_note_with_asset + push.{ASSET0_VALUE} push.{ASSET0_KEY} + exec.util::create_default_note_with_moved_asset # => [] - push.{asset1} exec.create_note_with_asset + + push.{ASSET1_VALUE} push.{ASSET1_KEY} + exec.util::create_default_note_with_moved_asset # => [] - push.{asset2} exec.create_note_with_asset + + push.{ASSET2_VALUE} push.{ASSET2_KEY} + exec.util::create_default_note_with_moved_asset # => [] - push.{asset3} exec.create_note_with_asset + + push.{ASSET3_VALUE} push.{ASSET3_KEY} + exec.util::create_default_note_with_moved_asset # => [] end ", - asset0 = Word::from(removed_asset0), - asset1 = Word::from(removed_asset1), - asset2 = Word::from(removed_asset2), - asset3 = Word::from(removed_asset3), + ASSET0_KEY = removed_asset0.to_key_word(), + ASSET0_VALUE = removed_asset0.to_value_word(), + ASSET1_KEY = removed_asset1.to_key_word(), + ASSET1_VALUE = removed_asset1.to_value_word(), + ASSET2_KEY = removed_asset2.to_key_word(), + ASSET2_VALUE = removed_asset2.to_value_word(), + ASSET3_KEY = removed_asset3.to_key_word(), + ASSET3_VALUE = removed_asset3.to_value_word(), ))?; let executed_tx = mock_chain @@ -455,13 +472,17 @@ async fn fungible_asset_delta() -> anyhow::Result<()> { .account_delta() .vault() .added_assets() - .map(|asset| (asset.unwrap_fungible().faucet_id(), asset.unwrap_fungible().amount())) + .map(|asset| { + (asset.unwrap_fungible().faucet_id(), asset.unwrap_fungible().amount().inner()) + }) .collect::>(); let mut removed_assets = executed_tx 
.account_delta() .vault() .removed_assets() - .map(|asset| (asset.unwrap_fungible().faucet_id(), asset.unwrap_fungible().amount())) + .map(|asset| { + (asset.unwrap_fungible().faucet_id(), asset.unwrap_fungible().amount().inner()) + }) .collect::>(); assert_eq!(added_assets.len(), 2); @@ -469,17 +490,20 @@ async fn fungible_asset_delta() -> anyhow::Result<()> { assert_eq!( added_assets.remove(&original_asset2.faucet_id()).unwrap(), - added_asset2.amount() - removed_asset2.amount() + added_asset2.amount().inner() - removed_asset2.amount().inner() + ); + assert_eq!( + added_assets.remove(&added_asset4.faucet_id()).unwrap(), + added_asset4.amount().inner() ); - assert_eq!(added_assets.remove(&added_asset4.faucet_id()).unwrap(), added_asset4.amount()); assert_eq!( removed_assets.remove(&original_asset0.faucet_id()).unwrap(), - removed_asset0.amount() - added_asset0.amount() + removed_asset0.amount().inner() - added_asset0.amount().inner() ); assert_eq!( removed_assets.remove(&original_asset3.faucet_id()).unwrap(), - removed_asset3.amount() + removed_asset3.amount().inner() ); Ok(()) @@ -508,10 +532,22 @@ async fn non_fungible_asset_delta() -> anyhow::Result<()> { .account_type(AccountType::NonFungibleFaucet) .build_with_seed(rng.random()); - let asset0 = NonFungibleAssetBuilder::new(faucet0.prefix(), &mut rng)?.build()?; - let asset1 = NonFungibleAssetBuilder::new(faucet1.prefix(), &mut rng)?.build()?; - let asset2 = NonFungibleAssetBuilder::new(faucet2.prefix(), &mut rng)?.build()?; - let asset3 = NonFungibleAssetBuilder::new(faucet3.prefix(), &mut rng)?.build()?; + let asset0 = NonFungibleAsset::new(&NonFungibleAssetDetails::new( + faucet0, + rng.random::<[u8; 32]>().to_vec(), + )?)?; + let asset1 = NonFungibleAsset::new(&NonFungibleAssetDetails::new( + faucet1, + rng.random::<[u8; 32]>().to_vec(), + )?)?; + let asset2 = NonFungibleAsset::new(&NonFungibleAssetDetails::new( + faucet2, + rng.random::<[u8; 32]>().to_vec(), + )?)?; + let asset3 = 
NonFungibleAsset::new(&NonFungibleAssetDetails::new( + faucet3, + rng.random::<[u8; 32]>().to_vec(), + )?)?; let TestSetup { mock_chain, account_id, notes } = setup_test([], [asset1, asset3].map(Asset::from), [asset0, asset2].map(Asset::from))?; @@ -519,22 +555,35 @@ async fn non_fungible_asset_delta() -> anyhow::Result<()> { let tx_script = parse_tx_script(format!( " begin - push.{asset1} exec.create_note_with_asset + push.{ASSET1_VALUE} push.{ASSET1_KEY} + exec.util::create_default_note_with_moved_asset # => [] - push.{asset2} exec.create_note_with_asset + + push.{ASSET2_VALUE} push.{ASSET2_KEY} + exec.util::create_default_note_with_moved_asset # => [] - # remove and re-add asset 3 - push.{asset3} + # remove asset 3 + push.{ASSET3_VALUE} + push.{ASSET3_KEY} exec.remove_asset - # => [ASSET] + # => [REMAINING_ASSET_VALUE] + dropw + + # re-add asset 3 + push.{ASSET3_VALUE} + push.{ASSET3_KEY} + # => [ASSET_KEY, ASSET_VALUE] exec.add_asset dropw # => [] end ", - asset1 = Word::from(asset1), - asset2 = Word::from(asset2), - asset3 = Word::from(asset3), + ASSET1_KEY = asset1.to_key_word(), + ASSET1_VALUE = asset1.to_value_word(), + ASSET2_KEY = asset2.to_key_word(), + ASSET2_VALUE = asset2.to_value_word(), + ASSET3_KEY = asset3.to_key_word(), + ASSET3_VALUE = asset3.to_value_word(), ))?; let executed_tx = mock_chain @@ -549,20 +598,20 @@ async fn non_fungible_asset_delta() -> anyhow::Result<()> { .account_delta() .vault() .added_assets() - .map(|asset| (asset.faucet_id_prefix(), asset.unwrap_non_fungible())) + .map(|asset| (asset.faucet_id(), asset.unwrap_non_fungible())) .collect::>(); let mut removed_assets = executed_tx .account_delta() .vault() .removed_assets() - .map(|asset| (asset.faucet_id_prefix(), asset.unwrap_non_fungible())) + .map(|asset| (asset.faucet_id(), asset.unwrap_non_fungible())) .collect::>(); assert_eq!(added_assets.len(), 1); assert_eq!(removed_assets.len(), 1); - assert_eq!(added_assets.remove(&asset0.faucet_id_prefix()).unwrap(), asset0); - 
assert_eq!(removed_assets.remove(&asset1.faucet_id_prefix()).unwrap(), asset1); + assert_eq!(added_assets.remove(&asset0.faucet_id()).unwrap(), asset0); + assert_eq!(removed_assets.remove(&asset1.faucet_id()).unwrap(), asset1); Ok(()) } @@ -583,7 +632,7 @@ async fn asset_and_storage_delta() -> anyhow::Result<()> { let updated_slot_value = Word::from([7, 9, 11, 13u32]); // updated storage map - let updated_map_key = Word::from([14, 15, 16, 17u32]); + let updated_map_key = StorageMapKey::from_array([14, 15, 16, 17u32]); let updated_map_value = Word::from([18, 19, 20, 21u32]); // removed assets @@ -622,16 +671,19 @@ async fn asset_and_storage_delta() -> anyhow::Result<()> { # => [note_idx, pad(15)] # move an asset to the created note to partially deplete fungible asset balance - swapw dropw push.{REMOVED_ASSET} + swapw dropw + push.{REMOVED_ASSET_VALUE} + push.{REMOVED_ASSET_KEY} call.::miden::standards::wallets::basic::move_asset_to_note - # => [ASSET, note_idx, pad(11)] + # => [pad(16)] # clear the stack dropw dropw dropw dropw ", NOTETYPE = note_types[i] as u8, tag = tags[i], - REMOVED_ASSET = Word::from(removed_assets[i]) + REMOVED_ASSET_KEY = removed_assets[i].to_key_word(), + REMOVED_ASSET_VALUE = removed_assets[i].to_value_word(), )); } @@ -654,7 +706,7 @@ async fn asset_and_storage_delta() -> anyhow::Result<()> { # get the index of account storage slot push.MOCK_VALUE_SLOT0[0..2] - # => [slot_id_prefix, slot_id_suffix, 13, 11, 9, 7] + # => [slot_id_suffix, slot_id_prefix, 13, 11, 9, 7] # update the storage value call.account::set_item dropw # => [] @@ -671,7 +723,7 @@ async fn asset_and_storage_delta() -> anyhow::Result<()> { # get the index of account storage slot push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, 14, 15, 16, 17, 18, 19, 20, 21] + # => [slot_id_suffix, slot_id_prefix, 14, 15, 16, 17, 18, 19, 20, 21] # update the storage value call.account::set_map_item dropw dropw dropw @@ -794,13 +846,13 @@ async fn asset_and_storage_delta() -> 
anyhow::Result<()> { async fn proven_tx_storage_maps_matches_executed_tx_for_new_account() -> anyhow::Result<()> { // Use two identical maps to test that they are properly handled // (see also https://github.com/0xMiden/protocol/issues/2037). - let map0 = StorageMap::with_entries([(rand_value(), rand_value())])?; + let map0 = StorageMap::with_entries([(StorageMapKey::from_raw(rand_value()), rand_value())])?; let map1 = map0.clone(); let mut map2 = StorageMap::with_entries([ - (rand_value(), rand_value()), - (rand_value(), rand_value()), - (rand_value(), rand_value()), - (rand_value(), rand_value()), + (StorageMapKey::from_raw(rand_value()), rand_value()), + (StorageMapKey::from_raw(rand_value()), rand_value()), + (StorageMapKey::from_raw(rand_value()), rand_value()), + (StorageMapKey::from_raw(rand_value()), rand_value()), ])?; let map0_slot_name = StorageSlotName::mock(1); @@ -835,7 +887,7 @@ async fn proven_tx_storage_maps_matches_executed_tx_for_new_account() -> anyhow: push.{value0} push.{existing_key} push.MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] call.account::set_map_item exec.::miden::core::sys::truncate_stack @@ -1083,13 +1135,14 @@ fn parse_tx_script(code: impl AsRef) -> anyhow::Result { const TEST_ACCOUNT_CONVENIENCE_WRAPPERS: &str = " use mock::account + use mock::util use miden::protocol::output_note - #! Inputs: [slot_id_prefix, slot_id_suffix, VALUE] + #! Inputs: [slot_id_suffix, slot_id_prefix, VALUE] #! Outputs: [] proc set_item repeat.10 push.0 movdn.6 end - # => [slot_id_prefix, slot_id_suffix, VALUE, pad(10)] + # => [slot_id_suffix, slot_id_prefix, VALUE, pad(10)] call.account::set_item # => [OLD_VALUE, pad(12)] @@ -1097,7 +1150,7 @@ const TEST_ACCOUNT_CONVENIENCE_WRAPPERS: &str = " dropw dropw dropw dropw end - #! Inputs: [slot_id_prefix, slot_id_suffix, KEY, VALUE] + #! Inputs: [slot_id_suffix, slot_id_prefix, KEY, VALUE] #! 
Outputs: [] proc set_map_item repeat.6 push.0 movdn.10 end @@ -1110,59 +1163,29 @@ const TEST_ACCOUNT_CONVENIENCE_WRAPPERS: &str = " # => [] end - #! Inputs: [ASSET] - #! Outputs: [] - proc create_note_with_asset - push.0.1.2.3 # recipient - push.2 # note_type private - push.0xC0000000 # tag - # => [tag, note_type, RECIPIENT, ASSET] - - exec.output_note::create - # => [note_idx, ASSET] - - movdn.4 - # => [ASSET, note_idx] - - exec.move_asset_to_note - # => [] - end - - #! Inputs: [ASSET, note_idx] - #! Outputs: [] - proc move_asset_to_note - repeat.11 push.0 movdn.5 end - # => [ASSET, note_idx, pad(11)] - - call.account::move_asset_to_note - - # return values are unused - dropw dropw dropw dropw - end - - #! Inputs: [ASSET] - #! Outputs: [ASSET'] + #! Inputs: [ASSET_KEY, ASSET_VALUE] + #! Outputs: [ASSET_VALUE'] proc add_asset - repeat.12 push.0 movdn.4 end - # => [ASSET, pad(12)] + repeat.8 push.0 movdn.8 end + # => [ASSET_KEY, ASSET_VALUE, pad(8)] call.account::add_asset - # => [ASSET', pad(12)] + # => [ASSET_VALUE', pad(12)] repeat.12 movup.4 drop end - # => [ASSET'] + # => [ASSET_VALUE'] end - #! Inputs: [ASSET] - #! Outputs: [ASSET] + #! Inputs: [ASSET_KEY, ASSET_VALUE] + #! 
Outputs: [ASSET_VALUE] proc remove_asset - repeat.12 push.0 movdn.4 end - # => [ASSET, pad(12)] + padw padw swapdw + # => [ASSET_KEY, ASSET_VALUE, pad(8)] call.account::remove_asset - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] repeat.12 movup.4 drop end - # => [ASSET] + # => [ASSET_VALUE] end "; diff --git a/crates/miden-testing/src/kernel_tests/tx/test_account_interface.rs b/crates/miden-testing/src/kernel_tests/tx/test_account_interface.rs index 215cdb8a35..7ce54979cb 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_account_interface.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_account_interface.rs @@ -3,11 +3,12 @@ use alloc::vec::Vec; use assert_matches::assert_matches; use miden_processor::ExecutionError; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::crypto::random::RandomCoin; use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{Account, AccountId}; use miden_protocol::asset::{Asset, FungibleAsset}; use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::field::PrimeField64; use miden_protocol::note::{ Note, NoteAssets, @@ -23,8 +24,8 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, ACCOUNT_ID_SENDER, }; -use miden_protocol::transaction::{InputNote, OutputNote, TransactionKernel}; -use miden_protocol::{Felt, StarkField, Word}; +use miden_protocol::transaction::{InputNote, RawOutputNote, TransactionKernel}; +use miden_protocol::{Felt, Word}; use miden_standards::note::{ NoteConsumptionStatus, P2idNote, @@ -56,7 +57,7 @@ async fn check_note_consumability_standard_notes_success() -> anyhow::Result<()> vec![FungibleAsset::mock(10)], NoteType::Public, Default::default(), - &mut RpoRandomCoin::new(Word::from([2u32; 4])), + &mut RandomCoin::new(Word::from([2u32; 4])), )?; let p2ide_note = P2ideNote::create( @@ -69,7 +70,7 @@ async fn check_note_consumability_standard_notes_success() -> anyhow::Result<()> vec![FungibleAsset::mock(10)], 
NoteType::Public, Default::default(), - &mut RpoRandomCoin::new(Word::from([2u32; 4])), + &mut RandomCoin::new(Word::from([2u32; 4])), )?; let notes = vec![p2id_note, p2ide_note]; @@ -113,8 +114,10 @@ async fn check_note_consumability_custom_notes_success( let account = Account::mock(ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, Auth::IncrNonce); - let (_, authenticator) = - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }.build_component(); + let (_, authenticator) = Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + } + .build_component(); TransactionContextBuilder::new(account) .extend_input_notes(notes.clone()) .authenticator(authenticator) @@ -228,7 +231,7 @@ async fn check_note_consumability_partial_success() -> anyhow::Result<()> { FailedNote { note, error: TransactionExecutorError::TransactionProgramExecutionFailed( - ExecutionError::DivideByZero { .. }) + ExecutionError::OperationError { err: miden_processor::operation::OperationError::DivideByZero, .. }) } => { assert_eq!( note.id(), @@ -242,7 +245,7 @@ async fn check_note_consumability_partial_success() -> anyhow::Result<()> { FailedNote { note, error: TransactionExecutorError::TransactionProgramExecutionFailed( - ExecutionError::DivideByZero { .. }) + ExecutionError::OperationError { err: miden_processor::operation::OperationError::DivideByZero, .. }) } => { assert_eq!( note.id(), @@ -265,8 +268,9 @@ async fn check_note_consumability_epilogue_failure() -> anyhow::Result<()> { let mut builder = MockChain::builder(); // Use basic auth which will cause epilogue failure when paired up with unreachable auth. 
- let account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let successful_note = builder.add_p2id_note( ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), @@ -350,7 +354,7 @@ async fn check_note_consumability_epilogue_failure_with_new_combination() -> any let fail_epilogue_note = NoteBuilder::new(account.id(), &mut rand::rng()) .add_assets([Asset::from(note_asset)]) .build()?; - builder.add_output_note(OutputNote::Full(fail_epilogue_note.clone())); + builder.add_output_note(RawOutputNote::Full(fail_epilogue_note.clone())); let mock_chain = builder.build()?; let notes = vec![ @@ -391,7 +395,7 @@ async fn check_note_consumability_epilogue_failure_with_new_combination() -> any FailedNote { note, error: TransactionExecutorError::TransactionProgramExecutionFailed( - ExecutionError::DivideByZero { .. }) + ExecutionError::OperationError { err: miden_processor::operation::OperationError::DivideByZero, .. }) } => { assert_eq!( note.id(), @@ -405,7 +409,7 @@ async fn check_note_consumability_epilogue_failure_with_new_combination() -> any FailedNote { note, error: TransactionExecutorError::TransactionProgramExecutionFailed( - ExecutionError::FailedAssertion { .. }) + ExecutionError::OperationError { err: miden_processor::operation::OperationError::FailedAssertion { .. }, .. }) } => { assert_eq!( note.id(), @@ -428,8 +432,9 @@ async fn test_check_note_consumability_without_signatures() -> anyhow::Result<() let mut builder = MockChain::builder(); // Use basic auth which will cause epilogue failure when paired up with unreachable auth. 
- let account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let successful_note = builder.add_p2id_note( ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), @@ -484,15 +489,20 @@ async fn test_check_note_consumability_static_analysis_invalid_inputs() -> anyho let p2ide_invalid_target_id = create_p2ide_note_with_storage([1, 2, 3, 4], sender_account_id); let p2ide_wrong_target = create_p2ide_note_with_storage( - [wrong_target_id.suffix().as_int(), wrong_target_id.prefix().as_u64(), 3, 4], + [ + wrong_target_id.suffix().as_canonical_u64(), + wrong_target_id.prefix().as_u64(), + 3, + 4, + ], sender_account_id, ); let p2ide_invalid_reclaim = create_p2ide_note_with_storage( [ - target_account_id.suffix().as_int(), + target_account_id.suffix().as_canonical_u64(), target_account_id.prefix().as_u64(), - Felt::MODULUS - 1, + Felt::ORDER_U64 - 1, 4, ], sender_account_id, @@ -500,10 +510,10 @@ async fn test_check_note_consumability_static_analysis_invalid_inputs() -> anyho let p2ide_invalid_timelock = create_p2ide_note_with_storage( [ - target_account_id.suffix().as_int(), + target_account_id.suffix().as_canonical_u64(), target_account_id.prefix().as_u64(), 3, - Felt::MODULUS - 1, + Felt::ORDER_U64 - 1, ], sender_account_id, ); @@ -658,14 +668,14 @@ async fn test_check_note_consumability_static_analysis_receiver( let p2ide = create_p2ide_note_with_storage( [ - target_account_id.suffix().as_int(), + target_account_id.suffix().as_canonical_u64(), target_account_id.prefix().as_u64(), reclaim_height, timelock_height, ], sender_account_id, ); - builder.add_output_note(OutputNote::Full(p2ide.clone())); + builder.add_output_note(RawOutputNote::Full(p2ide.clone())); let mut mock_chain = builder.build()?; mock_chain.prove_until_block(3)?; @@ -748,14 +758,14 @@ async fn 
test_check_note_consumability_static_analysis_sender( let p2ide = create_p2ide_note_with_storage( [ - target_account_id.suffix().as_int(), + target_account_id.suffix().as_canonical_u64(), target_account_id.prefix().as_u64(), reclaim_height, timelock_height, ], sender_account_id, ); - builder.add_output_note(OutputNote::Full(p2ide.clone())); + builder.add_output_note(RawOutputNote::Full(p2ide.clone())); let mut mock_chain = builder.build()?; mock_chain.prove_until_block(3)?; @@ -795,7 +805,7 @@ fn create_p2ide_note_with_storage( storage: impl IntoIterator, sender: AccountId, ) -> Note { - let serial_num = RpoRandomCoin::new(Default::default()).draw_word(); + let serial_num = RandomCoin::new(Default::default()).draw_word(); let note_script = StandardNote::P2IDE.script(); let recipient = NoteRecipient::new( serial_num, diff --git a/crates/miden-testing/src/kernel_tests/tx/test_active_note.rs b/crates/miden-testing/src/kernel_tests/tx/test_active_note.rs index f700825b4e..de5171d810 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_active_note.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_active_note.rs @@ -4,7 +4,7 @@ use anyhow::Context; use miden_protocol::account::Account; use miden_protocol::account::auth::AuthScheme; use miden_protocol::asset::FungibleAsset; -use miden_protocol::crypto::rand::{FeltRng, RpoRandomCoin}; +use miden_protocol::crypto::rand::{FeltRng, RandomCoin}; use miden_protocol::errors::tx_kernel::ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_METADATA_WHILE_NO_NOTE_BEING_PROCESSED; use miden_protocol::note::{ Note, @@ -20,6 +20,7 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, ACCOUNT_ID_SENDER, }; +use miden_protocol::transaction::memory::{ASSET_SIZE, ASSET_VALUE_OFFSET}; use miden_protocol::{EMPTY_WORD, Felt, ONE, WORD_SIZE, Word}; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::mock_account::MockAccountExt; @@ -38,8 +39,9 @@ use crate::{ async fn 
test_active_note_get_sender_fails_from_tx_script() -> anyhow::Result<()> { // Creates a mockchain with an account and a note let mut builder = MockChain::builder(); - let account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ -160,8 +162,8 @@ async fn test_active_note_get_sender() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(code).await?; let sender = tx_context.input_notes().get_note(0).note().metadata().sender(); - assert_eq!(exec_output.stack[0], sender.prefix().as_felt()); - assert_eq!(exec_output.stack[1], sender.suffix()); + assert_eq!(exec_output.get_stack_element(0), sender.suffix()); + assert_eq!(exec_output.get_stack_element(1), sender.prefix().as_felt()); Ok(()) } @@ -171,8 +173,9 @@ async fn test_active_note_get_assets() -> anyhow::Result<()> { // Creates a mockchain with an account and a note that it can consume let tx_context = { let mut builder = MockChain::builder(); - let account = builder - .add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note_1 = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ -207,10 +210,16 @@ async fn test_active_note_get_assets() -> anyhow::Result<()> { for asset in note.assets().iter() { code += &format!( r#" - # assert the asset is correct - dup padw movup.4 mem_loadw_be push.{asset} assert_eqw.err="asset mismatch" push.4 add + dup padw movup.4 mem_loadw_le push.{ASSET_KEY} + assert_eqw.err="asset key mismatch" + + dup padw movup.4 add.{ASSET_VALUE_OFFSET} mem_loadw_le push.{ASSET_VALUE} + assert_eqw.err="asset value mismatch" + + add.{ASSET_SIZE} "#, - asset = 
Word::from(asset) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), ); } code @@ -305,12 +314,13 @@ async fn test_active_note_get_assets() -> anyhow::Result<()> { } #[tokio::test] -async fn test_active_note_get_inputs() -> anyhow::Result<()> { +async fn test_active_note_get_storage() -> anyhow::Result<()> { // Creates a mockchain with an account and a note that it can consume let tx_context = { let mut builder = MockChain::builder(); - let account = builder - .add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ -335,7 +345,7 @@ async fn test_active_note_get_inputs() -> anyhow::Result<()> { r#" # assert the storage items are correct # => [dest_ptr] - dup padw movup.4 mem_loadw_be push.{storage_word} assert_eqw.err="storage items are incorrect" + dup padw movup.4 mem_loadw_le push.{storage_word} assert_eqw.err="storage items are incorrect" # => [dest_ptr] push.4 add @@ -410,7 +420,7 @@ async fn test_active_note_get_exactly_8_inputs() -> anyhow::Result<()> { )?; // prepare note data - let serial_num = RpoRandomCoin::new(Word::from([4u32; 4])).draw_word(); + let serial_num = RandomCoin::new(Word::from([4u32; 4])).draw_word(); let tag = NoteTag::with_account_target(target_id); let metadata = NoteMetadata::new(sender_id, NoteType::Public).with_tag(tag); let vault = NoteAssets::new(vec![]).context("failed to create input note assets")?; @@ -470,8 +480,9 @@ async fn test_active_note_get_exactly_8_inputs() -> anyhow::Result<()> { async fn test_active_note_get_serial_number() -> anyhow::Result<()> { let tx_context = { let mut builder = MockChain::builder(); - let account = builder - .add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let account = 
builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note_1 = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ -502,7 +513,7 @@ async fn test_active_note_get_serial_number() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(code).await?; let serial_number = tx_context.input_notes().get_note(0).note().serial_num(); - assert_eq!(exec_output.get_stack_word_be(0), serial_number); + assert_eq!(exec_output.get_stack_word(0), serial_number); Ok(()) } @@ -510,8 +521,9 @@ async fn test_active_note_get_serial_number() -> anyhow::Result<()> { async fn test_active_note_get_script_root() -> anyhow::Result<()> { let tx_context = { let mut builder = MockChain::builder(); - let account = builder - .add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note_1 = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ -542,6 +554,6 @@ async fn test_active_note_get_script_root() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(code).await?; let script_root = tx_context.input_notes().get_note(0).note().script().root(); - assert_eq!(exec_output.get_stack_word_be(0), script_root); + assert_eq!(exec_output.get_stack_word(0), script_root); Ok(()) } diff --git a/crates/miden-testing/src/kernel_tests/tx/test_array.rs b/crates/miden-testing/src/kernel_tests/tx/test_array.rs index 2c8d1c9d16..87d5877a0d 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_array.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_array.rs @@ -1,14 +1,15 @@ //! Tests for the Array utility `get` and `set` procedures. 
+use miden_protocol::Word; use miden_protocol::account::component::AccountComponentMetadata; use miden_protocol::account::{ AccountBuilder, AccountComponent, StorageMap, + StorageMapKey, StorageSlot, StorageSlotName, }; -use miden_protocol::{Felt, FieldElement, Word}; use miden_standards::code_builder::CodeBuilder; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha20Rng; @@ -66,10 +67,7 @@ async fn test_array_get_and_set() -> anyhow::Result<()> { wrapper_library.clone(), vec![StorageSlot::with_map( slot_name.clone(), - StorageMap::with_entries([( - Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::ZERO]), - initial_value, - )])?, + StorageMap::with_entries([(StorageMapKey::empty(), initial_value)])?, )], AccountComponentMetadata::mock("wrapper::component"), )?; @@ -144,7 +142,7 @@ async fn test_array_get_and_set() -> anyhow::Result<()> { async fn test_double_word_array_get_and_set() -> anyhow::Result<()> { let slot_name = StorageSlotName::new(TEST_DOUBLE_WORD_ARRAY_SLOT).expect("slot name should be valid"); - let index = Felt::new(7); + let index = 7; let wrapper_component_code = format!( r#" @@ -185,8 +183,8 @@ async fn test_double_word_array_get_and_set() -> anyhow::Result<()> { vec![StorageSlot::with_map( slot_name.clone(), StorageMap::with_entries([ - (Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, index]), initial_value_0), - (Word::from([Felt::ZERO, Felt::ZERO, Felt::ONE, index]), initial_value_1), + (StorageMapKey::from_array([0, 0, 0, index]), initial_value_0), + (StorageMapKey::from_array([0, 0, 1, index]), initial_value_1), ])?, )], AccountComponentMetadata::mock("wrapper::component"), diff --git a/crates/miden-testing/src/kernel_tests/tx/test_asset.rs b/crates/miden-testing/src/kernel_tests/tx/test_asset.rs index 6299450263..15e49bf6cd 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_asset.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_asset.rs @@ -1,17 +1,40 @@ use miden_protocol::account::AccountId; -use 
miden_protocol::asset::NonFungibleAsset; -use miden_protocol::testing::account_id::ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET; +use miden_protocol::asset::{ + AssetCallbackFlag, + AssetId, + AssetVaultKey, + FungibleAsset, + NonFungibleAsset, + NonFungibleAssetDetails, +}; +use miden_protocol::errors::MasmError; +use miden_protocol::errors::tx_kernel::{ + ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_AMOUNT, + ERR_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_FUNGIBLE, + ERR_FUNGIBLE_ASSET_KEY_ASSET_ID_MUST_BE_ZERO, + ERR_FUNGIBLE_ASSET_VALUE_MOST_SIGNIFICANT_ELEMENTS_MUST_BE_ZERO, + ERR_NON_FUNGIBLE_ASSET_ID_PREFIX_MUST_MATCH_HASH1, + ERR_NON_FUNGIBLE_ASSET_ID_SUFFIX_MUST_MATCH_HASH0, + ERR_NON_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_NON_FUNGIBLE, +}; +use miden_protocol::testing::account_id::{ + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, + ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, + ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE, +}; use miden_protocol::testing::constants::{FUNGIBLE_ASSET_AMOUNT, NON_FUNGIBLE_ASSET_DATA}; -use miden_protocol::{Felt, Hasher, Word}; +use miden_protocol::{Felt, Word}; -use crate::TransactionContextBuilder; +use crate::executor::CodeExecutor; use crate::kernel_tests::tx::ExecutionOutputExt; +use crate::{TransactionContextBuilder, assert_execution_error}; #[tokio::test] async fn test_create_fungible_asset_succeeds() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::with_fungible_faucet(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET) .build()?; + let expected_asset = FungibleAsset::new(tx_context.account().id(), FUNGIBLE_ASSET_AMOUNT)?; let code = format!( " @@ -24,25 +47,19 @@ async fn test_create_fungible_asset_succeeds() -> anyhow::Result<()> { # create fungible asset push.{FUNGIBLE_ASSET_AMOUNT} exec.faucet::create_fungible_asset + # => [ASSET_KEY, ASSET_VALUE] # truncate the stack - swapw dropw + exec.::miden::core::sys::truncate_stack end " ); let exec_output = &tx_context.execute_code(&code).await?; - let faucet_id = 
AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET).unwrap(); - assert_eq!( - exec_output.get_stack_word_be(0), - Word::from([ - Felt::new(FUNGIBLE_ASSET_AMOUNT), - Felt::new(0), - faucet_id.suffix(), - faucet_id.prefix().as_felt(), - ]) - ); + assert_eq!(exec_output.get_stack_word(0), expected_asset.to_key_word()); + assert_eq!(exec_output.get_stack_word(4), expected_asset.to_value_word()); + Ok(()) } @@ -52,7 +69,11 @@ async fn test_create_non_fungible_asset_succeeds() -> anyhow::Result<()> { TransactionContextBuilder::with_non_fungible_faucet(NonFungibleAsset::mock_issuer().into()) .build()?; - let non_fungible_asset = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA); + let non_fungible_asset_details = NonFungibleAssetDetails::new( + NonFungibleAsset::mock_issuer(), + NON_FUNGIBLE_ASSET_DATA.to_vec(), + )?; + let non_fungible_asset = NonFungibleAsset::new(&non_fungible_asset_details)?; let code = format!( " @@ -63,46 +84,182 @@ async fn test_create_non_fungible_asset_succeeds() -> anyhow::Result<()> { exec.prologue::prepare_transaction # push non-fungible asset data hash onto the stack - push.{non_fungible_asset_data_hash} + push.{NON_FUNGIBLE_ASSET_DATA_HASH} exec.faucet::create_non_fungible_asset # truncate the stack - swapw dropw + exec.::miden::core::sys::truncate_stack end ", - non_fungible_asset_data_hash = Hasher::hash(&NON_FUNGIBLE_ASSET_DATA), + NON_FUNGIBLE_ASSET_DATA_HASH = non_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; - assert_eq!(exec_output.get_stack_word_be(0), Word::from(non_fungible_asset)); + + assert_eq!(exec_output.get_stack_word(0), non_fungible_asset.to_key_word()); + assert_eq!(exec_output.get_stack_word(4), non_fungible_asset.to_value_word()); Ok(()) } +#[rstest::rstest] +#[case::account_is_not_non_fungible_faucet( + ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE.try_into()?, + AssetId::default(), + ERR_NON_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_NON_FUNGIBLE +)] 
+#[case::asset_id_suffix_mismatch( + ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET.try_into()?, + AssetId::new(Felt::from(0u32), Felt::from(3u32)), + ERR_NON_FUNGIBLE_ASSET_ID_SUFFIX_MUST_MATCH_HASH0 +)] +#[case::asset_id_prefix_mismatch( + ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET.try_into()?, + AssetId::new(Felt::from(2u32), Felt::from(0u32)), + ERR_NON_FUNGIBLE_ASSET_ID_PREFIX_MUST_MATCH_HASH1 +)] #[tokio::test] -async fn test_validate_non_fungible_asset() -> anyhow::Result<()> { - let tx_context = - TransactionContextBuilder::with_non_fungible_faucet(NonFungibleAsset::mock_issuer().into()) - .build()?; +async fn test_validate_non_fungible_asset( + #[case] account_id: AccountId, + #[case] asset_id: AssetId, + #[case] expected_err: MasmError, +) -> anyhow::Result<()> { + let code = format!( + " + use $kernel::non_fungible_asset + + begin + # a random asset value + push.[2, 3, 4, 5] + # => [hash0 = 2, hash1 = 3, 4, 5] + + push.{account_id_prefix} + push.{account_id_suffix} + push.{asset_id_prefix} + push.{asset_id_suffix} + # => [ASSET_KEY, ASSET_VALUE] - let non_fungible_asset = Word::from(NonFungibleAsset::mock(&[1, 2, 3])); + exec.non_fungible_asset::validate + # truncate the stack + swapdw dropw dropw + end + ", + asset_id_suffix = asset_id.suffix(), + asset_id_prefix = asset_id.prefix(), + account_id_suffix = account_id.suffix(), + account_id_prefix = account_id.prefix().as_felt(), + ); + + let exec_result = CodeExecutor::with_default_host().run(&code).await; + + assert_execution_error!(exec_result, expected_err); + + Ok(()) +} + +#[rstest::rstest] +#[case::account_is_not_fungible_faucet( + ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE.try_into()?, + AssetId::default(), + Word::empty(), + ERR_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_FUNGIBLE +)] +#[case::asset_id_suffix_is_non_zero( + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into()?, + AssetId::new(Felt::from(1u32), Felt::from(0u32)), + Word::empty(), + ERR_FUNGIBLE_ASSET_KEY_ASSET_ID_MUST_BE_ZERO +)] 
+#[case::asset_id_prefix_is_non_zero( + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into()?, + AssetId::new(Felt::from(0u32), Felt::from(1u32)), + Word::empty(), + ERR_FUNGIBLE_ASSET_KEY_ASSET_ID_MUST_BE_ZERO +)] +#[case::non_amount_value_is_non_zero( + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into()?, + AssetId::default(), + Word::from([0, 1, 0, 0u32]), + ERR_FUNGIBLE_ASSET_VALUE_MOST_SIGNIFICANT_ELEMENTS_MUST_BE_ZERO +)] +#[case::amount_exceeds_max( + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into()?, + AssetId::default(), + Word::try_from([FungibleAsset::MAX_AMOUNT + 1, 0, 0, 0])?, + ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_AMOUNT +)] +#[tokio::test] +async fn test_validate_fungible_asset( + #[case] account_id: AccountId, + #[case] asset_id: AssetId, + #[case] asset_value: Word, + #[case] expected_err: MasmError, +) -> anyhow::Result<()> { let code = format!( " - use $kernel::asset + use $kernel::fungible_asset begin - push.{non_fungible_asset} - exec.asset::validate_non_fungible_asset + push.{ASSET_VALUE} + push.{account_id_prefix} + push.{account_id_suffix} + push.{asset_id_prefix} + push.{asset_id_suffix} + # => [ASSET_KEY, ASSET_VALUE] + + exec.fungible_asset::validate # truncate the stack - swapw dropw + swapdw dropw dropw end + ", + asset_id_suffix = asset_id.suffix(), + asset_id_prefix = asset_id.prefix(), + account_id_suffix = account_id.suffix(), + account_id_prefix = account_id.prefix().as_felt(), + ASSET_VALUE = asset_value, + ); + + let exec_result = CodeExecutor::with_default_host().run(&code).await; + + assert_execution_error!(exec_result, expected_err); + + Ok(()) +} + +#[rstest::rstest] +#[case::without_callbacks(AssetCallbackFlag::Disabled)] +#[case::with_callbacks(AssetCallbackFlag::Enabled)] +#[tokio::test] +async fn test_key_to_asset_metadata(#[case] callbacks: AssetCallbackFlag) -> anyhow::Result<()> { + let faucet_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET)?; + let vault_key = AssetVaultKey::new(AssetId::default(), faucet_id, callbacks)?; + 
+ let code = format!( " use $kernel::asset + + begin + push.{ASSET_KEY} + exec.asset::key_to_callbacks_enabled + # => [callbacks_enabled, ASSET_KEY] + + # truncate stack + swapw dropw swap drop + # => [callbacks_enabled] + end + ", + ASSET_KEY = vault_key.to_word(), ); - let exec_output = &tx_context.execute_code(&code).await?; + let exec_output = CodeExecutor::with_default_host().run(&code).await?; + + assert_eq!( + exec_output.get_stack_element(0).as_canonical_u64(), + callbacks.as_u8() as u64, + "MASM key_to_callbacks_enabled returned wrong value for {callbacks:?}" + ); - assert_eq!(exec_output.get_stack_word_be(0), non_fungible_asset); Ok(()) } diff --git a/crates/miden-testing/src/kernel_tests/tx/test_asset_vault.rs b/crates/miden-testing/src/kernel_tests/tx/test_asset_vault.rs index 3749824d0e..e4bdfefce9 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_asset_vault.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_asset_vault.rs @@ -7,7 +7,6 @@ use miden_protocol::asset::{ NonFungibleAsset, NonFungibleAssetDetails, }; -use miden_protocol::errors::AssetVaultError; use miden_protocol::errors::protocol::ERR_VAULT_GET_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET; use miden_protocol::errors::tx_kernel::{ ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW, @@ -15,15 +14,18 @@ use miden_protocol::errors::tx_kernel::{ ERR_VAULT_NON_FUNGIBLE_ASSET_ALREADY_EXISTS, ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND, }; +use miden_protocol::errors::{AssetError, AssetVaultError}; use miden_protocol::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1, ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET_1, }; use miden_protocol::testing::constants::{FUNGIBLE_ASSET_AMOUNT, NON_FUNGIBLE_ASSET_DATA}; use miden_protocol::transaction::memory; -use miden_protocol::{Felt, ONE, Word, ZERO}; +use miden_protocol::{ONE, Word}; +use crate::executor::CodeExecutor; use
crate::kernel_tests::tx::ExecutionOutputExt; use crate::{TransactionContextBuilder, assert_execution_error}; @@ -41,7 +43,8 @@ async fn get_balance_returns_correct_amount() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{suffix} push.{prefix} + push.{prefix} + push.{suffix} exec.active_account::get_balance # => [balance] @@ -56,7 +59,7 @@ async fn get_balance_returns_correct_amount() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_element(0).as_int(), + exec_output.get_stack_element(0).as_canonical_u64(), tx_context.account().vault().get_balance(faucet_id).unwrap() ); @@ -68,7 +71,7 @@ async fn get_balance_returns_correct_amount() -> anyhow::Result<()> { async fn peek_asset_returns_correct_asset() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::with_existing_mock_account().build()?; let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); - let asset_key = AssetVaultKey::from_account_id(faucet_id).unwrap(); + let asset_key = AssetVaultKey::new_fungible(faucet_id).unwrap(); let code = format!( r#" @@ -89,20 +92,20 @@ async fn peek_asset_returns_correct_asset() -> anyhow::Result<()> { # => [ASSET_KEY, account_vault_root_ptr] exec.asset_vault::peek_asset - # => [PEEKED_ASSET] + # => [PEEKED_ASSET_VALUE] # truncate the stack swapw dropw end "#, - ASSET_KEY = asset_key + ASSET_KEY = asset_key.to_word() ); let exec_output = tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_word_be(0), - Word::from(tx_context.account().vault().get(asset_key).unwrap()) + exec_output.get_stack_word(0), + tx_context.account().vault().get(asset_key).unwrap().to_value_word() ); Ok(()) @@ -124,7 +127,7 @@ async fn test_get_balance_non_fungible_fails() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{suffix} push.{prefix} + push.{prefix} push.{suffix} exec.active_account::get_balance end ", @@ -155,14 +158,14 
@@ async fn test_has_non_fungible_asset() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{non_fungible_asset_key} + push.{NON_FUNGIBLE_ASSET_KEY} exec.active_account::has_non_fungible_asset # truncate the stack swap drop end ", - non_fungible_asset_key = Word::from(non_fungible_asset) + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset.to_key_word(), ); let exec_output = tx_context.execute_code(&code).await?; @@ -178,13 +181,7 @@ async fn test_add_fungible_asset_success() -> anyhow::Result<()> { let mut account_vault = tx_context.account().vault().clone(); let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); let amount = FungibleAsset::MAX_AMOUNT - FUNGIBLE_ASSET_AMOUNT; - let add_fungible_asset = Asset::try_from(Word::new([ - Felt::new(amount), - ZERO, - faucet_id.suffix(), - faucet_id.prefix().as_felt(), - ])) - .unwrap(); + let add_fungible_asset = FungibleAsset::new(faucet_id, amount)?; let code = format!( " @@ -193,21 +190,26 @@ async fn test_add_fungible_asset_success() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::add_asset # truncate the stack - swapw dropw + swapdw dropw dropw end ", - FUNGIBLE_ASSET = Word::from(add_fungible_asset) + FUNGIBLE_ASSET_KEY = add_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = add_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_word_be(0), - Word::from(account_vault.add_asset(add_fungible_asset).unwrap()) + exec_output.get_stack_word(0), + account_vault + .add_asset(Asset::Fungible(add_fungible_asset)) + .unwrap() + .to_value_word() ); assert_eq!( @@ -225,13 +227,7 @@ async fn test_add_non_fungible_asset_fail_overflow() -> anyhow::Result<()> { let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); let amount = FungibleAsset::MAX_AMOUNT - 
FUNGIBLE_ASSET_AMOUNT + 1; - let add_fungible_asset = Asset::try_from(Word::new([ - Felt::new(amount), - ZERO, - faucet_id.suffix(), - faucet_id.prefix().as_felt(), - ])) - .unwrap(); + let add_fungible_asset = FungibleAsset::new(faucet_id, amount)?; let code = format!( " @@ -240,17 +236,20 @@ async fn test_add_non_fungible_asset_fail_overflow() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::add_asset + dropw dropw end ", - FUNGIBLE_ASSET = Word::from(add_fungible_asset) + FUNGIBLE_ASSET_KEY = add_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = add_fungible_asset.to_value_word(), ); let exec_result = tx_context.execute_code(&code).await; assert_execution_error!(exec_result, ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED); - assert!(account_vault.add_asset(add_fungible_asset).is_err()); + assert!(account_vault.add_asset(Asset::Fungible(add_fungible_asset)).is_err()); Ok(()) } @@ -261,7 +260,7 @@ async fn test_add_non_fungible_asset_success() -> anyhow::Result<()> { let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET.try_into()?; let mut account_vault = tx_context.account().vault().clone(); let add_non_fungible_asset = Asset::NonFungible(NonFungibleAsset::new( - &NonFungibleAssetDetails::new(faucet_id.prefix(), vec![1, 2, 3, 4, 5, 6, 7, 8]).unwrap(), + &NonFungibleAssetDetails::new(faucet_id, vec![1, 2, 3, 4, 5, 6, 7, 8]).unwrap(), )?); let code = format!( @@ -271,21 +270,23 @@ async fn test_add_non_fungible_asset_success() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} call.account::add_asset # truncate the stack - swapw dropw + swapdw dropw dropw end ", - FUNGIBLE_ASSET = Word::from(add_non_fungible_asset) + NON_FUNGIBLE_ASSET_KEY = add_non_fungible_asset.to_key_word(), + NON_FUNGIBLE_ASSET_VALUE = 
add_non_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_word_be(0), - Word::from(account_vault.add_asset(add_non_fungible_asset)?) + exec_output.get_stack_word(0), + account_vault.add_asset(add_non_fungible_asset)?.to_value_word() ); assert_eq!( @@ -302,7 +303,7 @@ async fn test_add_non_fungible_asset_fail_duplicate() -> anyhow::Result<()> { let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET.try_into().unwrap(); let mut account_vault = tx_context.account().vault().clone(); let non_fungible_asset_details = - NonFungibleAssetDetails::new(faucet_id.prefix(), NON_FUNGIBLE_ASSET_DATA.to_vec()).unwrap(); + NonFungibleAssetDetails::new(faucet_id, NON_FUNGIBLE_ASSET_DATA.to_vec()).unwrap(); let non_fungible_asset = Asset::NonFungible(NonFungibleAsset::new(&non_fungible_asset_details).unwrap()); @@ -313,11 +314,14 @@ async fn test_add_non_fungible_asset_fail_duplicate() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{NON_FUNGIBLE_ASSET} + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} call.account::add_asset + dropw dropw end ", - NON_FUNGIBLE_ASSET = Word::from(non_fungible_asset) + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset.to_key_word(), + NON_FUNGIBLE_ASSET_VALUE = non_fungible_asset.to_value_word(), ); let exec_result = tx_context.execute_code(&code).await; @@ -335,13 +339,7 @@ async fn test_remove_fungible_asset_success_no_balance_remaining() -> anyhow::Re let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); let amount = FUNGIBLE_ASSET_AMOUNT; - let remove_fungible_asset = Asset::try_from(Word::new([ - Felt::new(amount), - ZERO, - faucet_id.suffix(), - faucet_id.prefix().as_felt(), - ])) - .unwrap(); + let remove_fungible_asset = FungibleAsset::new(faucet_id, amount)?; let code = format!( " @@ -350,22 +348,24 @@ async fn test_remove_fungible_asset_success_no_balance_remaining() -> anyhow::Re begin 
exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::remove_asset # truncate the stack - swapw dropw + exec.::miden::core::sys::truncate_stack end ", - FUNGIBLE_ASSET = Word::from(remove_fungible_asset) + FUNGIBLE_ASSET_KEY = remove_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = remove_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; - assert_eq!( - exec_output.get_stack_word_be(0), - Word::from(account_vault.remove_asset(remove_fungible_asset).unwrap()) - ); + let remaining = account_vault + .remove_asset(Asset::Fungible(remove_fungible_asset))? + .expect("fungible removal should return remaining asset"); + assert_eq!(exec_output.get_stack_word(0), remaining.to_value_word()); assert_eq!( exec_output.get_kernel_mem_word(memory::NATIVE_ACCT_VAULT_ROOT_PTR), @@ -380,13 +380,7 @@ async fn test_remove_fungible_asset_fail_remove_too_much() -> anyhow::Result<()> let tx_context = TransactionContextBuilder::with_existing_mock_account().build()?; let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); let amount = FUNGIBLE_ASSET_AMOUNT + 1; - let remove_fungible_asset = Asset::try_from(Word::new([ - Felt::new(amount), - ZERO, - faucet_id.suffix(), - faucet_id.prefix().as_felt(), - ])) - .unwrap(); + let remove_fungible_asset = FungibleAsset::new(faucet_id, amount)?; let code = format!( " @@ -395,11 +389,13 @@ async fn test_remove_fungible_asset_fail_remove_too_much() -> anyhow::Result<()> begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::remove_asset end ", - FUNGIBLE_ASSET = Word::from(remove_fungible_asset) + FUNGIBLE_ASSET_KEY = remove_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = remove_fungible_asset.to_value_word(), ); let exec_result = tx_context.execute_code(&code).await; @@ -419,13 +415,7 @@ async fn 
test_remove_fungible_asset_success_balance_remaining() -> anyhow::Resul let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); let amount = FUNGIBLE_ASSET_AMOUNT - 1; - let remove_fungible_asset = Asset::try_from(Word::new([ - Felt::new(amount), - ZERO, - faucet_id.suffix(), - faucet_id.prefix().as_felt(), - ])) - .unwrap(); + let remove_fungible_asset = FungibleAsset::new(faucet_id, amount)?; let code = format!( " @@ -434,22 +424,24 @@ async fn test_remove_fungible_asset_success_balance_remaining() -> anyhow::Resul begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::remove_asset # truncate the stack - swapw dropw + exec.::miden::core::sys::truncate_stack end ", - FUNGIBLE_ASSET = Word::from(remove_fungible_asset) + FUNGIBLE_ASSET_KEY = remove_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = remove_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; - assert_eq!( - exec_output.get_stack_word_be(0), - Word::from(account_vault.remove_asset(remove_fungible_asset).unwrap()) - ); + let remaining = account_vault + .remove_asset(Asset::Fungible(remove_fungible_asset))? 
+ .expect("fungible removal should return remaining asset"); + assert_eq!(exec_output.get_stack_word(0), remaining.to_value_word()); assert_eq!( exec_output.get_kernel_mem_word(memory::NATIVE_ACCT_VAULT_ROOT_PTR), @@ -466,7 +458,7 @@ async fn test_remove_inexisting_non_fungible_asset_fails() -> anyhow::Result<()> let mut account_vault = tx_context.account().vault().clone(); let non_fungible_asset_details = - NonFungibleAssetDetails::new(faucet_id.prefix(), NON_FUNGIBLE_ASSET_DATA.to_vec()).unwrap(); + NonFungibleAssetDetails::new(faucet_id, NON_FUNGIBLE_ASSET_DATA.to_vec()).unwrap(); let nonfungible = NonFungibleAsset::new(&non_fungible_asset_details).unwrap(); let non_existent_non_fungible_asset = Asset::NonFungible(nonfungible); @@ -483,11 +475,13 @@ async fn test_remove_inexisting_non_fungible_asset_fails() -> anyhow::Result<()> begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::remove_asset end ", - FUNGIBLE_ASSET = Word::from(non_existent_non_fungible_asset) + FUNGIBLE_ASSET_KEY = non_existent_non_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = non_existent_non_fungible_asset.to_value_word(), ); let exec_result = tx_context.execute_code(&code).await; @@ -508,7 +502,7 @@ async fn test_remove_non_fungible_asset_success() -> anyhow::Result<()> { let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET.try_into().unwrap(); let mut account_vault = tx_context.account().vault().clone(); let non_fungible_asset_details = - NonFungibleAssetDetails::new(faucet_id.prefix(), NON_FUNGIBLE_ASSET_DATA.to_vec()).unwrap(); + NonFungibleAssetDetails::new(faucet_id, NON_FUNGIBLE_ASSET_DATA.to_vec()).unwrap(); let non_fungible_asset = Asset::NonFungible(NonFungibleAsset::new(&non_fungible_asset_details).unwrap()); @@ -519,22 +513,25 @@ async fn test_remove_non_fungible_asset_success() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + 
push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::remove_asset # truncate the stack - swapw dropw + exec.::miden::core::sys::truncate_stack end ", - FUNGIBLE_ASSET = Word::from(non_fungible_asset) + FUNGIBLE_ASSET_KEY = non_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = non_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; - assert_eq!( - exec_output.get_stack_word_be(0), - Word::from(account_vault.remove_asset(non_fungible_asset).unwrap()) + assert!( + account_vault.remove_asset(non_fungible_asset)?.is_none(), + "non-fungible removal should return None" ); + assert_eq!(exec_output.get_stack_word(0), Word::default()); assert_eq!( exec_output.get_kernel_mem_word(memory::NATIVE_ACCT_VAULT_ROOT_PTR), @@ -543,3 +540,177 @@ async fn test_remove_non_fungible_asset_success() -> anyhow::Result<()> { Ok(()) } + +/// Tests that adding two fungible assets results in the expected value. +#[tokio::test] +async fn test_merge_fungible_asset_success() -> anyhow::Result<()> { + let asset0 = FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT); + let asset1 = FungibleAsset::mock(FungibleAsset::MAX_AMOUNT - FUNGIBLE_ASSET_AMOUNT); + let merged_asset = asset0.unwrap_fungible().add(asset1.unwrap_fungible())?; + + // Check merging is commutative by checking asset0 + asset1 = asset1 + asset0. + for (asset_a, asset_b) in [(asset0, asset1), (asset1, asset0)] { + let code = format!( + " + use $kernel::fungible_asset + + begin + push.{ASSETA} + push.{ASSETB} + exec.fungible_asset::merge + # => [MERGED_ASSET] + + # truncate the stack + swapw dropw + end + ", + ASSETA = asset_a.to_value_word(), + ASSETB = asset_b.to_value_word(), + ); + + let exec_output = CodeExecutor::with_default_host().run(&code).await?; + + assert_eq!(exec_output.get_stack_word(0), merged_asset.to_value_word()); + } + + Ok(()) +} + +/// Tests that adding two fungible assets fails when the added amounts exceed +/// [`FungibleAsset::MAX_AMOUNT`]. 
+#[tokio::test] +async fn test_merge_fungible_asset_fails_when_max_amount_exceeded() -> anyhow::Result<()> { + let asset0 = FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT); + let asset1 = FungibleAsset::mock(FungibleAsset::MAX_AMOUNT + 1 - FUNGIBLE_ASSET_AMOUNT); + + // Check merging fails for both asset0 + asset1 and asset1 + asset0. + for (asset_a, asset_b) in [(asset0, asset1), (asset1, asset0)] { + // Sanity check that the Rust implementation errors. + assert_matches!( + asset_a.unwrap_fungible().add(asset_b.unwrap_fungible()).unwrap_err(), + AssetError::FungibleAssetAmountTooBig(_) + ); + + let code = format!( + " + use $kernel::fungible_asset + + begin + push.{ASSETA} + push.{ASSETB} + exec.fungible_asset::merge + # => [MERGED_ASSET] + + # truncate the stack + swapw dropw + end + ", + ASSETA = asset_a.to_value_word(), + ASSETB = asset_b.to_value_word(), + ); + + let exec_output = CodeExecutor::with_default_host().run(&code).await; + + assert_execution_error!(exec_output, ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED); + } + + Ok(()) +} + +/// Tests that splitting a fungible asset returns the correct remaining amount. 
+#[rstest::rstest] +#[case::different_amounts(FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT), FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT - 1))] +#[case::same_amounts( + FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT), + FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT) +)] +#[tokio::test] +async fn test_split_fungible_asset_success( + #[case] asset0: Asset, + #[case] asset1: Asset, +) -> anyhow::Result<()> { + let split_asset = asset0.unwrap_fungible().sub(asset1.unwrap_fungible())?; + + let code = format!( + " + use $kernel::fungible_asset + + begin + push.{ASSET0} + push.{ASSET1} + exec.fungible_asset::split + # => [NEW_ASSET_VALUE_0] + + # truncate the stack + swapw dropw + end + ", + ASSET0 = asset0.to_value_word(), + ASSET1 = asset1.to_value_word(), + ); + + let exec_output = CodeExecutor::with_default_host().run(&code).await?; + + assert_eq!(exec_output.get_stack_word(0), split_asset.to_value_word()); + + Ok(()) +} + +/// Tests that splitting a fungible asset fails when the amount to withdraw exceeds the balance. +#[tokio::test] +async fn test_split_fungible_asset_fails_when_amount_exceeds_balance() -> anyhow::Result<()> { + let asset0 = FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT); + let asset1 = FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT + 1); + + // Sanity check that the Rust implementation errors. + assert_matches!( + asset0.unwrap_fungible().sub(asset1.unwrap_fungible()).unwrap_err(), + AssetError::FungibleAssetAmountNotSufficient { .. } + ); + + let code = format!( + " + use $kernel::fungible_asset + + begin + push.{ASSET0} + push.{ASSET1} + exec.fungible_asset::split + # => [SPLIT_ASSET] + + # truncate the stack + swapw dropw + end + ", + ASSET0 = asset0.to_value_word(), + ASSET1 = asset1.to_value_word(), + ); + + let exec_output = CodeExecutor::with_default_host().run(&code).await; + + assert_execution_error!( + exec_output, + ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW + ); + + Ok(()) +} + +/// Tests that merging two different fungible assets fails. 
+#[tokio::test] +async fn test_merge_different_fungible_assets_fails() -> anyhow::Result<()> { + // Create two fungible assets from different faucets + let faucet_id1: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); + let faucet_id2: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1.try_into().unwrap(); + + let asset0 = FungibleAsset::new(faucet_id1, FUNGIBLE_ASSET_AMOUNT)?; + let asset1 = FungibleAsset::new(faucet_id2, FUNGIBLE_ASSET_AMOUNT)?; + + // Sanity check that the Rust implementation errors when adding assets from different faucets. + assert_matches!( + asset0.add(asset1).unwrap_err(), + AssetError::FungibleAssetInconsistentVaultKeys { .. } + ); + + Ok(()) +} diff --git a/crates/miden-testing/src/kernel_tests/tx/test_auth.rs b/crates/miden-testing/src/kernel_tests/tx/test_auth.rs index c400161482..e24157d1a5 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_auth.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_auth.rs @@ -24,10 +24,10 @@ async fn test_auth_procedure_args() -> anyhow::Result<()> { Account::mock(ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, ConditionalAuthComponent); let auth_args = [ - ONE, // incr_nonce = true - Felt::new(99), - Felt::new(98), Felt::new(97), + Felt::new(98), + Felt::new(99), + ONE, // incr_nonce = true ]; let tx_context = TransactionContextBuilder::new(account).auth_args(auth_args.into()).build()?; diff --git a/crates/miden-testing/src/kernel_tests/tx/test_callbacks.rs b/crates/miden-testing/src/kernel_tests/tx/test_callbacks.rs new file mode 100644 index 0000000000..a2b827a536 --- /dev/null +++ b/crates/miden-testing/src/kernel_tests/tx/test_callbacks.rs @@ -0,0 +1,708 @@ +extern crate alloc; + +use alloc::collections::BTreeSet; +use alloc::vec::Vec; + +use miden_protocol::account::auth::AuthScheme; +use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{ + Account, + AccountBuilder, + AccountComponent, + AccountComponentCode, + AccountId, + 
AccountStorageMode, + AccountType, + StorageMap, + StorageMapKey, + StorageSlot, + StorageSlotName, +}; +use miden_protocol::asset::{ + Asset, + AssetCallbackFlag, + AssetCallbacks, + FungibleAsset, + NonFungibleAsset, + NonFungibleAssetDetails, +}; +use miden_protocol::block::account_tree::AccountIdKey; +use miden_protocol::errors::MasmError; +use miden_protocol::note::{NoteTag, NoteType}; +use miden_protocol::utils::sync::LazyLock; +use miden_protocol::{Felt, Word}; +use miden_standards::account::faucets::BasicFungibleFaucet; +use miden_standards::code_builder::CodeBuilder; +use miden_standards::procedure_digest; +use miden_standards::testing::account_component::MockFaucetComponent; + +use crate::{AccountState, Auth, MockChain, MockChainBuilder, assert_transaction_executor_error}; + +// CONSTANTS +// ================================================================================================ + +/// MASM code for the BlockList callback component. +/// +/// This procedure checks whether the native account (the one receiving the asset) is in a +/// block list stored in a storage map. If the account is blocked, the callback panics. +const BLOCK_LIST_MASM: &str = r#" +use miden::protocol::active_account +use miden::protocol::native_account +use miden::core::word + +const BLOCK_LIST_MAP_SLOT = word("miden::testing::callbacks::block_list") +const ERR_ACCOUNT_BLOCKED = "the account is blocked and cannot receive this asset" + +#! Asserts that the native account is not in the block list. +#! +#! Inputs: [] +#! Outputs: [] +#! +#! Panics if the native account is in the block list. +#! +#! 
Invocation: exec +proc assert_native_account_not_blocked + # Get the native account ID + exec.native_account::get_id + # => [native_acct_suffix, native_acct_prefix] + + # Build account ID map key: [0, 0, suffix, prefix] + push.0.0 + # => [ACCOUNT_ID_KEY] + + # Look up in block list storage map + push.BLOCK_LIST_MAP_SLOT[0..2] + exec.active_account::get_map_item + # => [IS_BLOCKED] + + # If IS_BLOCKED is non-zero, account is blocked. + exec.word::eqz + assert.err=ERR_ACCOUNT_BLOCKED + # => [] +end + +#! Callback invoked when an asset with callbacks enabled is added to an account's vault. +#! +#! Checks whether the receiving account is in the block list. If so, panics. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] +#! Outputs: [ASSET_VALUE, pad(12)] +#! +#! Invocation: call +pub proc on_before_asset_added_to_account + exec.assert_native_account_not_blocked + # => [ASSET_KEY, ASSET_VALUE, pad(8)] + + # drop unused asset key + dropw + # => [ASSET_VALUE, pad(12)] +end + +#! Callback invoked when an asset with callbacks enabled is added to an output note. +#! +#! Checks whether the native account (the note creator) is in the block list. If so, panics. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] +#! Outputs: [ASSET_VALUE, pad(12)] +#! +#! Invocation: call +pub proc on_before_asset_added_to_note + exec.assert_native_account_not_blocked + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + + # drop unused asset key + dropw + # => [ASSET_VALUE, note_idx, pad(7)] +end +"#; + +/// The expected error when a blocked account tries to receive an asset with callbacks. +const ERR_ACCOUNT_BLOCKED: MasmError = + MasmError::from_static_str("the account is blocked and cannot receive this asset"); + +// Initialize the block list component code only once.
+static BLOCK_LIST_COMPONENT_CODE: LazyLock<AccountComponentCode> = LazyLock::new(|| { + CodeBuilder::default() + .compile_component_code(BlockList::NAME, BLOCK_LIST_MASM) + .expect("block list library should be valid") +}); + +static BLOCK_LIST_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| { + StorageSlotName::new("miden::testing::callbacks::block_list") + .expect("storage slot name should be valid") +}); + +procedure_digest!( + BLOCK_LIST_ON_BEFORE_ASSET_ADDED_TO_ACCOUNT, + BlockList::NAME, + BlockList::ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_PROC_NAME, + || { BLOCK_LIST_COMPONENT_CODE.as_library() } +); + +procedure_digest!( + BLOCK_LIST_ON_BEFORE_ASSET_ADDED_TO_NOTE, + BlockList::NAME, + BlockList::ON_BEFORE_ASSET_ADDED_TO_NOTE_PROC_NAME, + || { BLOCK_LIST_COMPONENT_CODE.as_library() } +); + +// BLOCK LIST +// ================================================================================================ + +/// A test component that implements a block list for the `on_before_asset_added_to_account` +/// callback. +/// +/// When a faucet distributes assets with callbacks enabled, this component checks whether the +/// receiving account is in the block list. If the account is blocked, the transaction fails. +struct BlockList { + blocked_accounts: BTreeSet<AccountId>, +} + +impl BlockList { + const NAME: &str = "miden::testing::callbacks::block_list"; + + const ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_PROC_NAME: &str = "on_before_asset_added_to_account"; + + const ON_BEFORE_ASSET_ADDED_TO_NOTE_PROC_NAME: &str = "on_before_asset_added_to_note"; + + /// Creates a new [`BlockList`] with the given set of blocked accounts. + fn new(blocked_accounts: BTreeSet<AccountId>) -> Self { + Self { blocked_accounts } + } + + /// Returns the digest of the `on_before_asset_added_to_account` procedure. + pub fn on_before_asset_added_to_account_digest() -> Word { + *BLOCK_LIST_ON_BEFORE_ASSET_ADDED_TO_ACCOUNT + } + + /// Returns the digest of the `on_before_asset_added_to_note` procedure.
+ pub fn on_before_asset_added_to_note_digest() -> Word { + *BLOCK_LIST_ON_BEFORE_ASSET_ADDED_TO_NOTE + } +} + +impl From<BlockList> for AccountComponent { + fn from(block_list: BlockList) -> Self { + // Build the storage map of blocked accounts + let map_entries: Vec<(StorageMapKey, Word)> = block_list + .blocked_accounts + .iter() + .map(|account_id| { + let map_key = StorageMapKey::new(AccountIdKey::new(*account_id).as_word()); + // Non-zero value means the account is blocked + let map_value = Word::new([Felt::ONE, Felt::ZERO, Felt::ZERO, Felt::ZERO]); + (map_key, map_value) + }) + .collect(); + + let storage_map = StorageMap::with_entries(map_entries) + .expect("btree set should guarantee no duplicates"); + + // Build storage slots: block list map + asset callbacks value slot + let mut storage_slots = + vec![StorageSlot::with_map(BLOCK_LIST_SLOT_NAME.clone(), storage_map)]; + storage_slots.extend( + AssetCallbacks::new() + .on_before_asset_added_to_account( + BlockList::on_before_asset_added_to_account_digest(), + ) + .on_before_asset_added_to_note(BlockList::on_before_asset_added_to_note_digest()) + .into_storage_slots(), + ); + let metadata = AccountComponentMetadata::new( + BlockList::NAME, + [AccountType::FungibleFaucet, AccountType::NonFungibleFaucet], + ) + .with_description("block list callback component for testing"); + + AccountComponent::new(BLOCK_LIST_COMPONENT_CODE.clone(), storage_slots, metadata) + .expect("block list should satisfy the requirements of a valid account component") + } +} + +// TESTS +// ================================================================================================ + +/// Tests that consuming a callbacks-enabled asset succeeds even when the issuing faucet does not +/// have the callback storage slot or when the callback storage slot contains the empty word.
+#[rstest::rstest] +#[case::fungible_empty_storage(AccountType::FungibleFaucet, true)] +#[case::fungible_no_storage(AccountType::FungibleFaucet, false)] +#[case::non_fungible_empty_storage(AccountType::NonFungibleFaucet, true)] +#[case::non_fungible_no_storage(AccountType::NonFungibleFaucet, false)] +#[tokio::test] +async fn test_faucet_without_callback_slot_skips_callback( + #[case] account_type: AccountType, + #[case] has_empty_callback_proc_root: bool, +) -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; + + // Create a faucet WITHOUT any AssetCallbacks component. + let mut account_builder = AccountBuilder::new([45u8; 32]) + .storage_mode(AccountStorageMode::Public) + .account_type(account_type) + .with_component(MockFaucetComponent); + + // If callback proc roots should be empty, add the empty storage slots. + if has_empty_callback_proc_root { + let name = "miden::testing::callbacks"; + let slots = AssetCallbacks::new().into_storage_slots(); + let component = AccountComponent::new( + CodeBuilder::new().compile_component_code(name, "pub proc dummy nop end")?, + slots, + AccountComponentMetadata::mock(name), + )?; + account_builder = account_builder.with_component(component); + } + + let faucet = builder.add_account_from_builder( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + account_builder, + AccountState::Exists, + )?; + + // Create a P2ID note with a callbacks-enabled asset from this faucet. + // The faucet does not have the callback slot, but the asset has callbacks enabled. 
+ let asset = match account_type { + AccountType::FungibleFaucet => Asset::from(FungibleAsset::new(faucet.id(), 100)?), + AccountType::NonFungibleFaucet => Asset::from(NonFungibleAsset::new( + &NonFungibleAssetDetails::new(faucet.id(), vec![1])?, + )?), + _ => unreachable!("test only uses faucet account types"), + } + .with_callbacks(AssetCallbackFlag::Enabled); + + let note = + builder.add_p2id_note(faucet.id(), target_account.id(), &[asset], NoteType::Public)?; + + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let faucet_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; + + // Consuming the note should succeed: the callback is gracefully skipped because the + // faucet does not define the callback storage slot. + mock_chain + .build_tx_context(target_account.id(), &[note.id()], &[])? + .foreign_accounts(vec![faucet_inputs]) + .build()? + .execute() + .await?; + + Ok(()) +} + +// ON_ASSET_ADDED_TO_ACCOUNT TESTS +// ================================================================================================ + +/// Tests that the `on_before_asset_added_to_account` callback receives the correct inputs. +#[tokio::test] +async fn test_on_before_asset_added_to_account_callback_receives_correct_inputs() +-> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + // Create wallet first so we know its ID before building the faucet. + let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; + let wallet_id_suffix = target_account.id().suffix().as_canonical_u64(); + let wallet_id_prefix = target_account.id().prefix().as_u64(); + + let amount: u64 = 100; + + // MASM callback that asserts the inputs match expected values. + let account_callback_masm = format!( + r#" + #! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] + #! 
Outputs: [ASSET_VALUE, pad(12)] + pub proc on_before_asset_added_to_account + # Assert native account ID can be retrieved via native_account::get_id + exec.::miden::protocol::native_account::get_id + # => [native_account_suffix, native_account_prefix, ASSET_KEY, ASSET_VALUE, pad(8)] + push.{wallet_id_suffix} assert_eq.err="callback received unexpected native account ID suffix" + push.{wallet_id_prefix} assert_eq.err="callback received unexpected native account ID prefix" + # => [ASSET_KEY, ASSET_VALUE, pad(8)] + + # duplicate the asset value for returning + dupw.1 swapw + # => [ASSET_KEY, ASSET_VALUE, ASSET_VALUE, pad(8)] + + # build the expected asset + push.{amount} + exec.::miden::protocol::active_account::get_id + push.1 + # => [enable_callbacks, active_account_id_suffix, active_account_id_prefix, amount, ASSET_KEY, ASSET_VALUE, ASSET_VALUE, pad(8)] + exec.::miden::protocol::asset::create_fungible_asset + # => [EXPECTED_ASSET_KEY, EXPECTED_ASSET_VALUE, ASSET_KEY, ASSET_VALUE, ASSET_VALUE, pad(8)] + + movupw.2 + assert_eqw.err="callback received unexpected asset key" + # => [EXPECTED_ASSET_VALUE, ASSET_VALUE, ASSET_VALUE, pad(8)] + + assert_eqw.err="callback received unexpected asset value" + # => [ASSET_VALUE, pad(12)] + end + "# + ); + + let faucet = add_faucet_with_callbacks(&mut builder, Some(&account_callback_masm), None)?; + + // Create a P2ID note with a callbacks-enabled fungible asset. + let fungible_asset = + FungibleAsset::new(faucet.id(), amount)?.with_callbacks(AssetCallbackFlag::Enabled); + let note = builder.add_p2id_note( + faucet.id(), + target_account.id(), + &[Asset::Fungible(fungible_asset)], + NoteType::Public, + )?; + + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let faucet_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; + + // Execute the transaction - should succeed because all callback assertions pass. + mock_chain + .build_tx_context(target_account.id(), &[note.id()], &[])? 
+ .foreign_accounts(vec![faucet_inputs]) + .build()? + .execute() + .await?; + + Ok(()) +} + +/// Tests that a blocked account cannot receive an asset with callbacks enabled. +#[rstest::rstest] +#[case::fungible( + AccountType::FungibleFaucet, + |faucet_id| { + Ok(FungibleAsset::new(faucet_id, 100)?.with_callbacks(AssetCallbackFlag::Enabled).into()) + } +)] +#[case::non_fungible( + AccountType::NonFungibleFaucet, + |faucet_id| { + let details = NonFungibleAssetDetails::new(faucet_id, vec![1, 2, 3, 4])?; + Ok(NonFungibleAsset::new(&details)?.with_callbacks(AssetCallbackFlag::Enabled).into()) + } +)] +#[tokio::test] +async fn test_blocked_account_cannot_receive_asset( + #[case] account_type: AccountType, + #[case] create_asset: impl FnOnce(AccountId) -> anyhow::Result, +) -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; + let faucet = add_faucet_with_block_list(&mut builder, account_type, [target_account.id()])?; + + let note = builder.add_p2id_note( + faucet.id(), + target_account.id(), + &[create_asset(faucet.id())?], + NoteType::Public, + )?; + + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let faucet_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; + + let result = mock_chain + .build_tx_context(target_account.id(), &[note.id()], &[])? + .foreign_accounts(vec![faucet_inputs]) + .build()? + .execute() + .await; + + assert_transaction_executor_error!(result, ERR_ACCOUNT_BLOCKED); + + Ok(()) +} + +// ON_ASSET_ADDED_TO_NOTE TESTS +// ================================================================================================ + +/// Tests that a blocked account cannot add a callbacks-enabled asset to an output note. 
+#[rstest::rstest] +#[case::fungible( + AccountType::FungibleFaucet, + |faucet_id| { + Ok(FungibleAsset::new(faucet_id, 100)?.with_callbacks(AssetCallbackFlag::Enabled).into()) + } +)] +#[case::non_fungible( + AccountType::NonFungibleFaucet, + |faucet_id| { + let details = NonFungibleAssetDetails::new(faucet_id, vec![1, 2, 3, 4])?; + Ok(NonFungibleAsset::new(&details)?.with_callbacks(AssetCallbackFlag::Enabled).into()) + } +)] +#[tokio::test] +async fn test_blocked_account_cannot_add_asset_to_note( + #[case] account_type: AccountType, + #[case] create_asset: impl FnOnce(AccountId) -> anyhow::Result, +) -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; + let faucet = add_faucet_with_block_list(&mut builder, account_type, [target_account.id()])?; + let asset = create_asset(faucet.id())?; + + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + // Build a tx script that creates a private output note and adds the callbacks-enabled asset. + // We use a private note to avoid the public note details requirement in the advice provider. + let recipient = Word::from([0u32, 1, 2, 3]); + let script_code = format!( + r#" + use miden::protocol::output_note + + begin + push.{recipient} + push.{note_type} + push.{tag} + exec.output_note::create + + push.{asset_value} + push.{asset_key} + exec.output_note::add_asset + end + "#, + recipient = recipient, + note_type = NoteType::Private as u8, + tag = NoteTag::default(), + asset_value = asset.to_value_word(), + asset_key = asset.to_key_word(), + ); + + let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(&script_code)?; + + let faucet_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; + + let result = mock_chain + .build_tx_context(target_account.id(), &[], &[])? + .tx_script(tx_script) + .foreign_accounts(vec![faucet_inputs]) + .build()? 
+ .execute() + .await; + + assert_transaction_executor_error!(result, ERR_ACCOUNT_BLOCKED); + + Ok(()) +} + +/// Tests that the `on_before_asset_added_to_note` callback receives the correct inputs. +/// +/// Creates two output notes so that the asset is added to note at index 1, verifying that +/// `note_idx` is correctly passed to the callback (using 1 instead of the default element of 0). +#[tokio::test] +async fn test_on_before_asset_added_to_note_callback_receives_correct_inputs() -> anyhow::Result<()> +{ + let mut builder = MockChain::builder(); + + // Create wallet first so we know its ID before building the faucet. + let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; + let wallet_id_suffix = target_account.id().suffix().as_canonical_u64(); + let wallet_id_prefix = target_account.id().prefix().as_u64(); + + let amount: u64 = 100; + + // MASM callback that asserts the inputs match expected values. + let note_callback_masm = format!( + r#" + const ERR_WRONG_NOTE_IDX = "callback received unexpected note_idx" + + #! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + #! 
Outputs: [ASSET_VALUE, pad(12)] + pub proc on_before_asset_added_to_note + # Assert native account ID can be retrieved via native_account::get_id + exec.::miden::protocol::native_account::get_id + # => [native_account_suffix, native_account_prefix, ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + push.{wallet_id_suffix} assert_eq.err="callback received unexpected native account ID suffix" + push.{wallet_id_prefix} assert_eq.err="callback received unexpected native account ID prefix" + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + + # Assert note_idx == 1 (we create two notes, adding the asset to the second one) + dup.8 push.1 assert_eq.err=ERR_WRONG_NOTE_IDX + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + + # duplicate the asset value for returning + dupw.1 swapw + # => [ASSET_KEY, ASSET_VALUE, ASSET_VALUE, note_idx, pad(7)] + + # build the expected asset + push.{amount} + exec.::miden::protocol::active_account::get_id + push.1 + # => [enable_callbacks, active_account_id_suffix, active_account_id_prefix, amount, ASSET_KEY, ASSET_VALUE, ASSET_VALUE, note_idx, pad(7)] + exec.::miden::protocol::asset::create_fungible_asset + # => [EXPECTED_ASSET_KEY, EXPECTED_ASSET_VALUE, ASSET_KEY, ASSET_VALUE, ASSET_VALUE, note_idx, pad(7)] + + movupw.2 + assert_eqw.err="callback received unexpected asset key" + # => [EXPECTED_ASSET_VALUE, ASSET_VALUE, ASSET_VALUE, note_idx, pad(7)] + + assert_eqw.err="callback received unexpected asset value" + # => [ASSET_VALUE, note_idx, pad(7)] + end + "# + ); + + let faucet = add_faucet_with_callbacks(&mut builder, None, Some(¬e_callback_masm))?; + + // Create a P2ID note with a callbacks-enabled fungible asset. + // Consuming this note adds the asset to the wallet's vault. 
+ let fungible_asset = + FungibleAsset::new(faucet.id(), amount)?.with_callbacks(AssetCallbackFlag::Enabled); + let asset = Asset::Fungible(fungible_asset); + let note = + builder.add_p2id_note(faucet.id(), target_account.id(), &[asset], NoteType::Public)?; + + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + // Build a tx script that creates two output notes and moves the asset from vault to the + // second note (note_idx=1), so we can verify that the callback receives the correct + // note_idx. + let script_code = format!( + r#" + use mock::util + + begin + # Create note 0 (just to consume index 0) + exec.util::create_default_note drop + # => [] + + # Create note 1 + push.{asset_value} + push.{asset_key} + # => [ASSET_KEY, ASSET_VALUE] + exec.util::create_default_note_with_moved_asset + # => [] + + dropw dropw + end + "#, + asset_value = asset.to_value_word(), + asset_key = asset.to_key_word(), + ); + + let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(&script_code)?; + + let faucet_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; + + // Execute the transaction: consume the P2ID note (asset enters vault), then move the asset + // to output note 1. Should succeed because all callback assertions pass. + mock_chain + .build_tx_context(target_account.id(), &[note.id()], &[])? + .tx_script(tx_script) + .foreign_accounts(vec![faucet_inputs]) + .build()? + .execute() + .await?; + + Ok(()) +} + +// HELPERS +// ================================================================================================ + +/// Builds a fungible faucet with the block list callback component and adds it to the builder. +/// +/// The block list component registers both the account and note callbacks. When a +/// callbacks-enabled asset is added to an account or note, the callback checks whether the +/// native account is in the block list and panics if so. 
+fn add_faucet_with_block_list( + builder: &mut MockChainBuilder, + account_type: AccountType, + blocked_accounts: impl IntoIterator, +) -> anyhow::Result { + let block_list = BlockList::new(blocked_accounts.into_iter().collect()); + + if !account_type.is_faucet() { + anyhow::bail!("account type must be of type faucet") + } + + let account_builder = AccountBuilder::new([42u8; 32]) + .storage_mode(AccountStorageMode::Public) + .account_type(account_type) + .with_component(MockFaucetComponent) + .with_component(block_list); + + builder.add_account_from_builder( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + account_builder, + AccountState::Exists, + ) +} + +/// Builds a fungible faucet with custom callback MASM code and adds it to the builder. +/// +/// `account_callback_masm` and `note_callback_masm` are optional MASM source for the +/// `on_before_asset_added_to_account` and `on_before_asset_added_to_note` procedures. Each +/// string should contain a complete `pub proc ... end` block including any constants needed. 
+fn add_faucet_with_callbacks( + builder: &mut MockChainBuilder, + account_callback_masm: Option<&str>, + note_callback_masm: Option<&str>, +) -> anyhow::Result { + let component_name = "miden::testing::callbacks::input_validator"; + + let masm_source = + format!("{}\n{}", account_callback_masm.unwrap_or(""), note_callback_masm.unwrap_or(""),); + + let callback_code = + CodeBuilder::default().compile_component_code(component_name, &masm_source)?; + + let mut callbacks = AssetCallbacks::new(); + + if account_callback_masm.is_some() { + let path = format!("{component_name}::on_before_asset_added_to_account"); + let proc_root = callback_code + .as_library() + .get_procedure_root_by_path(path.as_str()) + .expect("account callback procedure should exist"); + callbacks = callbacks.on_before_asset_added_to_account(proc_root); + } + + if note_callback_masm.is_some() { + let path = format!("{component_name}::on_before_asset_added_to_note"); + let proc_root = callback_code + .as_library() + .get_procedure_root_by_path(path.as_str()) + .expect("note callback procedure should exist"); + callbacks = callbacks.on_before_asset_added_to_note(proc_root); + } + + let basic_faucet = BasicFungibleFaucet::new("SYM".try_into()?, 8, Felt::new(1_000_000))?; + + let callback_storage_slots = callbacks.into_storage_slots(); + let callback_metadata = + AccountComponentMetadata::new(component_name, [AccountType::FungibleFaucet]) + .with_description("callback component for testing"); + let callback_component = + AccountComponent::new(callback_code, callback_storage_slots, callback_metadata)?; + + let account_builder = AccountBuilder::new([42; 32]) + .storage_mode(AccountStorageMode::Public) + .account_type(AccountType::FungibleFaucet) + .with_component(basic_faucet) + .with_component(callback_component); + + builder.add_account_from_builder( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + account_builder, + AccountState::Exists, + ) +} diff --git 
a/crates/miden-testing/src/kernel_tests/tx/test_epilogue.rs b/crates/miden-testing/src/kernel_tests/tx/test_epilogue.rs index 30edc3d950..fafec708e0 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_epilogue.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_epilogue.rs @@ -1,10 +1,8 @@ use alloc::string::ToString; -use alloc::vec::Vec; use std::borrow::ToOwned; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::crypto::random::RandomCoin; use miden_processor::{Felt, ONE}; -use miden_protocol::Word; use miden_protocol::account::{Account, AccountDelta, AccountStorageDelta, AccountVaultDelta}; use miden_protocol::asset::{Asset, FungibleAsset}; use miden_protocol::errors::tx_kernel::{ @@ -26,12 +24,12 @@ use miden_protocol::transaction::memory::{ OUTPUT_NOTE_ASSET_COMMITMENT_OFFSET, OUTPUT_NOTE_SECTION_OFFSET, }; -use miden_protocol::transaction::{OutputNote, OutputNotes, TransactionOutputs}; +use miden_protocol::transaction::{RawOutputNote, RawOutputNotes, TransactionOutputs}; +use miden_protocol::{Hasher, Word}; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::mock_account::MockAccountExt; use miden_standards::testing::note::NoteBuilder; -use super::ZERO; use crate::kernel_tests::tx::ExecutionOutputExt; use crate::utils::{create_p2any_note, create_public_p2any_note}; use crate::{ @@ -55,7 +53,7 @@ async fn test_transaction_epilogue() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::new(account.clone()) .extend_input_notes(vec![input_note_1]) - .extend_expected_output_notes(vec![OutputNote::Full(output_note_1.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note_1.clone())]) .build()?; let code = format!( @@ -74,7 +72,8 @@ async fn test_transaction_epilogue() -> anyhow::Result<()> { exec.output_note::create # => [note_idx] - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} exec.output_note::add_asset # => [] @@ -87,7 +86,8 @@ async fn test_transaction_epilogue() 
-> anyhow::Result<()> { recipient = output_note_1.recipient().digest(), note_type = Felt::from(output_note_1.metadata().note_type()), tag = Felt::from(output_note_1.metadata().tag()), - asset = Word::from(asset) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), ); let exec_output = tx_context.execute_code(&code).await?; @@ -96,12 +96,12 @@ async fn test_transaction_epilogue() -> anyhow::Result<()> { let mut final_account = account.clone(); final_account.increment_nonce(ONE)?; - let output_notes = OutputNotes::new( + let output_notes = RawOutputNotes::new( tx_context .expected_output_notes() .iter() .cloned() - .map(OutputNote::Full) + .map(RawOutputNote::Full) .collect(), )?; @@ -114,30 +114,41 @@ async fn test_transaction_epilogue() -> anyhow::Result<()> { .to_commitment(); let account_update_commitment = - miden_protocol::Hasher::merge(&[final_account.to_commitment(), account_delta_commitment]); - - let mut expected_stack = Vec::with_capacity(16); - expected_stack.extend(output_notes.commitment().as_elements().iter().rev()); - expected_stack.extend(account_update_commitment.as_elements().iter().rev()); - expected_stack.extend( - Word::from( - FungibleAsset::new( - tx_context.tx_inputs().block_header().fee_parameters().native_asset_id(), - 0, - ) - .unwrap(), - ) - .iter() - .rev(), - ); - expected_stack.push(Felt::from(u32::MAX)); // Value for tx expiration block number - expected_stack.extend((13..16).map(|_| ZERO)); + Hasher::merge(&[final_account.to_commitment(), account_delta_commitment]); + let fee_asset = FungibleAsset::new( + tx_context.tx_inputs().block_header().fee_parameters().native_asset_id(), + 0, + )?; assert_eq!( - exec_output.stack.as_slice(), - expected_stack.as_slice(), - "Stack state after finalize_transaction does not contain the expected values" + exec_output.get_stack_word(TransactionOutputs::OUTPUT_NOTES_COMMITMENT_WORD_IDX), + output_notes.commitment() + ); + assert_eq!( + 
exec_output.get_stack_word(TransactionOutputs::ACCOUNT_UPDATE_COMMITMENT_WORD_IDX), + account_update_commitment, + ); + assert_eq!( + exec_output.get_stack_element(TransactionOutputs::NATIVE_ASSET_ID_SUFFIX_ELEMENT_IDX), + fee_asset.faucet_id().suffix(), + ); + assert_eq!( + exec_output.get_stack_element(TransactionOutputs::NATIVE_ASSET_ID_PREFIX_ELEMENT_IDX), + fee_asset.faucet_id().prefix().as_felt() + ); + assert_eq!( + exec_output + .get_stack_element(TransactionOutputs::FEE_AMOUNT_ELEMENT_IDX) + .as_canonical_u64(), + fee_asset.amount().inner() + ); + assert_eq!( + exec_output + .get_stack_element(TransactionOutputs::EXPIRATION_BLOCK_ELEMENT_IDX) + .as_canonical_u64(), + u64::from(u32::MAX) ); + assert_eq!(exec_output.get_stack_word(12), Word::empty()); assert_eq!( exec_output.stack.len(), @@ -150,7 +161,7 @@ async fn test_transaction_epilogue() -> anyhow::Result<()> { /// Tests that the output note memory section is correctly populated during finalize_transaction. #[tokio::test] async fn test_compute_output_note_id() -> anyhow::Result<()> { - let mut rng = RpoRandomCoin::new(Word::from([3, 4, 5, 6u32])); + let mut rng = RandomCoin::new(Word::from([3, 4, 5, 6u32])); let account = Account::mock(ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, Auth::IncrNonce); let mut assets = account.vault().assets(); let asset0 = assets.next().unwrap(); @@ -161,8 +172,8 @@ async fn test_compute_output_note_id() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::new(account.clone()) .extend_expected_output_notes(vec![ - OutputNote::Full(output_note0.clone()), - OutputNote::Full(output_note1.clone()), + RawOutputNote::Full(output_note0.clone()), + RawOutputNote::Full(output_note1.clone()), ]) .build()?; @@ -187,14 +198,16 @@ async fn test_compute_output_note_id() -> anyhow::Result<()> { exec.output_note::create # => [note_idx] - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.::miden::standards::wallets::basic::move_asset_to_note # => [] ", recipient 
= note.recipient().digest(), note_type = Felt::from(note.metadata().note_type()), tag = Felt::from(note.metadata().tag()), - asset = Word::from(asset) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), )); } @@ -231,13 +244,21 @@ async fn test_compute_output_note_id() -> anyhow::Result<()> { Ok(()) } -/// Tests that a transaction fails due to the asset preservation rules when the input note has an -/// asset with amount 100 and the output note has the same asset with amount 200. +/// Tests that a transaction fails when assets aren't preserved, i.e. +/// - when the input note has asset amount 100 and the output note has asset amount 200. +/// - when the input note has asset amount 200 and the output note has asset amount 100. +#[rstest::rstest] +#[case::outputs_exceed_inputs(100, 200)] +#[case::inputs_exceed_outputs(200, 100)] #[tokio::test] -async fn epilogue_fails_when_num_output_assets_exceed_num_input_assets() -> anyhow::Result<()> { - // Create an input asset with amount 100 and an output asset with amount 200. 
- let input_asset = FungibleAsset::new(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1.try_into()?, 100)?; - let output_asset = input_asset.add(input_asset)?; +async fn epilogue_fails_when_assets_arent_preserved( + #[case] input_amount: u64, + #[case] output_amount: u64, +) -> anyhow::Result<()> { + let input_asset = + FungibleAsset::new(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1.try_into()?, input_amount)?; + let output_asset = + FungibleAsset::new(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1.try_into()?, output_amount)?; let mut builder = MockChain::builder(); let account = builder.add_existing_mock_account(Auth::IncrNonce)?; @@ -247,60 +268,7 @@ async fn epilogue_fails_when_num_output_assets_exceed_num_input_assets() -> anyh let input_note = NoteBuilder::new(account.id(), *builder.rng_mut()) .add_assets([Asset::from(input_asset)]) .build()?; - builder.add_output_note(OutputNote::Full(input_note.clone())); - let mock_chain = builder.build()?; - - let code = format!( - " - use mock::account - use mock::util - - begin - # create a note with the output asset - push.{OUTPUT_ASSET} - exec.util::create_default_note_with_asset - # => [] - end - ", - OUTPUT_ASSET = Word::from(output_asset), - ); - - let builder = CodeBuilder::with_mock_libraries(); - let source_manager = builder.source_manager(); - let tx_script = builder.compile_tx_script(code)?; - - let tx_context = mock_chain - .build_tx_context(TxContextInput::AccountId(account.id()), &[], &[input_note])? - .tx_script(tx_script) - .with_source_manager(source_manager) - .build()?; - - let exec_output = tx_context.execute().await; - assert_transaction_executor_error!( - exec_output, - ERR_EPILOGUE_TOTAL_NUMBER_OF_ASSETS_MUST_STAY_THE_SAME - ); - - Ok(()) -} - -/// Tests that a transaction fails due to the asset preservation rules when the input note has an -/// asset with amount 200 and the output note has the same asset with amount 100. 
-#[tokio::test] -async fn epilogue_fails_when_num_input_assets_exceed_num_output_assets() -> anyhow::Result<()> { - // Create an input asset with amount 200 and an output asset with amount 100. - let output_asset = FungibleAsset::new(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1.try_into()?, 100)?; - let input_asset = output_asset.add(output_asset)?; - - let mut builder = MockChain::builder(); - let account = builder.add_existing_mock_account(Auth::IncrNonce)?; - // Add an input note that (automatically) adds its assets to the transaction's input vault, but - // _does not_ add the asset to the account. This is just to keep the test conceptually simple - - // there is no account involved. - let input_note = NoteBuilder::new(account.id(), *builder.rng_mut()) - .add_assets([Asset::from(output_asset)]) - .build()?; - builder.add_output_note(OutputNote::Full(input_note.clone())); + builder.add_output_note(RawOutputNote::Full(input_note.clone())); let mock_chain = builder.build()?; let code = format!( @@ -310,12 +278,14 @@ async fn epilogue_fails_when_num_input_assets_exceed_num_output_assets() -> anyh begin # create a note with the output asset - push.{OUTPUT_ASSET} + push.{OUTPUT_ASSET_VALUE} + push.{OUTPUT_ASSET_KEY} exec.util::create_default_note_with_asset # => [] end ", - OUTPUT_ASSET = Word::from(input_asset), + OUTPUT_ASSET_KEY = output_asset.to_key_word(), + OUTPUT_ASSET_VALUE = output_asset.to_value_word(), ); let builder = CodeBuilder::with_mock_libraries(); @@ -379,7 +349,7 @@ async fn test_block_expiration_height_monotonically_decreases() -> anyhow::Resul assert_eq!( exec_output .get_stack_element(TransactionOutputs::EXPIRATION_BLOCK_ELEMENT_IDX) - .as_int(), + .as_canonical_u64(), expected_expiry ); } @@ -439,7 +409,7 @@ async fn test_no_expiration_delta_set() -> anyhow::Result<()> { assert_eq!( exec_output .get_stack_element(TransactionOutputs::EXPIRATION_BLOCK_ELEMENT_IDX) - .as_int() as u32, + .as_canonical_u64() as u32, u32::MAX ); @@ -498,7 +468,7 @@ async fn 
epilogue_fails_on_account_state_change_without_nonce_increment() -> any push.91.92.93.94 push.MOCK_VALUE_SLOT0[0..2] repeat.5 movup.5 drop end - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] call.account::set_item # => [PREV_VALUE] dropw diff --git a/crates/miden-testing/src/kernel_tests/tx/test_faucet.rs b/crates/miden-testing/src/kernel_tests/tx/test_faucet.rs index 00d6114a4e..59796ae2ec 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_faucet.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_faucet.rs @@ -1,13 +1,21 @@ use alloc::sync::Arc; +use miden_protocol::Felt; use miden_protocol::account::{Account, AccountBuilder, AccountComponent, AccountId, AccountType}; use miden_protocol::assembly::DefaultSourceManager; -use miden_protocol::asset::{FungibleAsset, NonFungibleAsset}; +use miden_protocol::asset::{ + AssetCallbackFlag, + AssetId, + AssetVaultKey, + FungibleAsset, + NonFungibleAsset, +}; use miden_protocol::errors::tx_kernel::{ + ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_AMOUNT, ERR_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN, - ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ZERO_MUST_BE_WITHIN_LIMITS, ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN, ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW, + ERR_VAULT_INVALID_ENABLE_CALLBACKS, ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND, }; use miden_protocol::testing::account_id::{ @@ -23,7 +31,6 @@ use miden_protocol::testing::constants::{ NON_FUNGIBLE_ASSET_DATA_2, }; use miden_protocol::testing::noop_auth_component::NoopAuthComponent; -use miden_protocol::{Felt, Word}; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::mock_account::MockAccountExt; @@ -50,28 +57,32 @@ async fn test_mint_fungible_asset_succeeds() -> anyhow::Result<()> { exec.prologue::prepare_transaction # mint asset - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.mock_faucet::mint # assert the correct asset is returned - 
push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} assert_eqw.err="minted asset does not match expected asset" # assert the input vault has been updated exec.memory::get_input_vault_root_ptr - push.{ASSET_KEY} + push.{FUNGIBLE_ASSET_KEY} exec.asset_vault::get_asset - # => [ASSET] + # => [ASSET_VALUE] # extract balance from asset - drop drop drop + movdn.3 drop drop drop # => [balance] push.{FUNGIBLE_ASSET_AMOUNT} assert_eq.err="input vault should contain minted asset" + + # truncate the stack + dropw end "#, - FUNGIBLE_ASSET = Word::from(asset), - ASSET_KEY = asset.vault_key(), + FUNGIBLE_ASSET_KEY = asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = asset.to_value_word(), ); TransactionContextBuilder::with_fungible_faucet(faucet_id.into()) @@ -86,17 +97,20 @@ async fn test_mint_fungible_asset_succeeds() -> anyhow::Result<()> { #[tokio::test] async fn mint_fungible_asset_fails_on_non_faucet_account() -> anyhow::Result<()> { let account = setup_non_faucet_account()?; + let asset = FungibleAsset::mock(50); let code = format!( " use mock::faucet begin - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.faucet::mint end ", - asset = Word::from(FungibleAsset::mock(50)) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), ); let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(code)?; @@ -116,6 +130,7 @@ async fn test_mint_fungible_asset_inconsistent_faucet_id() -> anyhow::Result<()> TransactionContextBuilder::with_fungible_faucet(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1) .build()?; + let asset = FungibleAsset::mock(5); let code = format!( " use $kernel::prologue @@ -123,11 +138,13 @@ async fn test_mint_fungible_asset_inconsistent_faucet_id() -> anyhow::Result<()> begin exec.prologue::prepare_transaction - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.faucet::mint end ", - asset = Word::from(FungibleAsset::mock(5)) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), ); let exec_output = 
tx_context.execute_code(&code).await; @@ -136,6 +153,41 @@ async fn test_mint_fungible_asset_inconsistent_faucet_id() -> anyhow::Result<()> Ok(()) } +/// Tests that minting a fungible asset on a non-faucet account fails when the key has its asset +/// metadata (lower 8 bits) set to u8::MAX. +#[tokio::test] +async fn mint_fungible_asset_fails_on_invalid_asset_metadata() -> anyhow::Result<()> { + let asset = FungibleAsset::mock(50); + + let mut vault_key_word = asset.to_key_word(); + vault_key_word[2] = Felt::try_from(vault_key_word[2].as_canonical_u64() | u8::MAX as u64)?; + + let code = format!( + " + use $kernel::prologue + use mock::faucet + + begin + exec.prologue::prepare_transaction + push.{ASSET_VALUE} + push.{ASSET_KEY} + call.faucet::mint + dropw dropw + end + ", + ASSET_KEY = vault_key_word, + ASSET_VALUE = asset.to_value_word(), + ); + + let result = TransactionContextBuilder::with_fungible_faucet(asset.faucet_id().into()) + .build()? + .execute_code(&code) + .await; + assert_execution_error!(result, ERR_VAULT_INVALID_ENABLE_CALLBACKS); + + Ok(()) +} + /// Tests that minting a fungible asset with [`FungibleAsset::MAX_AMOUNT`] + 1 fails. 
#[tokio::test] async fn test_mint_fungible_asset_fails_when_amount_exceeds_max_representable_amount() @@ -145,19 +197,21 @@ async fn test_mint_fungible_asset_fails_when_amount_exceeds_max_representable_am use mock::faucet begin - push.{max_amount_plus_1} push.0 - push.{faucet_id_suffix} - push.{faucet_id_prefix} - # => [faucet_id_prefix, faucet_id_suffix, 0, max_amount_plus_1] + push.0 + push.0 + push.{max_amount_plus_1} + # => [ASSET_VALUE] + + push.{ASSET_KEY} + # => [ASSET_KEY, ASSET_VALUE] call.faucet::mint - dropw + dropw dropw end ", - faucet_id_prefix = FungibleAsset::mock_issuer().prefix().as_felt(), - faucet_id_suffix = FungibleAsset::mock_issuer().suffix(), - max_amount_plus_1 = Felt::try_from(FungibleAsset::MAX_AMOUNT + 1).unwrap(), + ASSET_KEY = FungibleAsset::mock(0).to_key_word(), + max_amount_plus_1 = FungibleAsset::MAX_AMOUNT + 1, ); let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(code)?; @@ -168,10 +222,7 @@ async fn test_mint_fungible_asset_fails_when_amount_exceeds_max_representable_am .execute() .await; - assert_transaction_executor_error!( - result, - ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ZERO_MUST_BE_WITHIN_LIMITS - ); + assert_transaction_executor_error!(result, ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_AMOUNT); Ok(()) } @@ -198,25 +249,26 @@ async fn test_mint_non_fungible_asset_succeeds() -> anyhow::Result<()> { begin # mint asset exec.prologue::prepare_transaction - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} call.mock_faucet::mint # assert the correct asset is returned - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} assert_eqw.err="minted asset does not match expected asset" # assert the input vault has been updated. 
exec.memory::get_input_vault_root_ptr - push.{ASSET_KEY} + push.{NON_FUNGIBLE_ASSET_KEY} exec.asset_vault::get_asset - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} assert_eqw.err="vault should contain asset" dropw end "#, - ASSET_KEY = non_fungible_asset.vault_key(), - non_fungible_asset = Word::from(non_fungible_asset), + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset.to_key_word(), + NON_FUNGIBLE_ASSET_VALUE = non_fungible_asset.to_value_word(), ); tx_context.execute_code(&code).await?; @@ -239,11 +291,13 @@ async fn test_mint_non_fungible_asset_fails_inconsistent_faucet_id() -> anyhow:: begin exec.prologue::prepare_transaction - push.{non_fungible_asset} + push.{asset_value} + push.{asset_key} call.faucet::mint end ", - non_fungible_asset = Word::from(non_fungible_asset) + asset_key = non_fungible_asset.to_key_word(), + asset_value = non_fungible_asset.to_value_word(), ); let exec_output = tx_context.execute_code(&code).await; @@ -256,17 +310,20 @@ async fn test_mint_non_fungible_asset_fails_inconsistent_faucet_id() -> anyhow:: #[tokio::test] async fn mint_non_fungible_asset_fails_on_non_faucet_account() -> anyhow::Result<()> { let account = setup_non_faucet_account()?; + let asset = FungibleAsset::mock(50); let code = format!( " use mock::faucet begin - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.faucet::mint end ", - asset = Word::from(FungibleAsset::mock(50)) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), ); let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(code)?; @@ -280,6 +337,42 @@ async fn mint_non_fungible_asset_fails_on_non_faucet_account() -> anyhow::Result Ok(()) } +/// Tests minting a fungible asset with callbacks enabled. 
+#[tokio::test] +async fn test_mint_fungible_asset_with_callbacks_enabled() -> anyhow::Result<()> { + let faucet_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET).unwrap(); + let asset = FungibleAsset::new(faucet_id, FUNGIBLE_ASSET_AMOUNT)?; + + // Build a vault key with callbacks enabled. + let vault_key = AssetVaultKey::new(AssetId::default(), faucet_id, AssetCallbackFlag::Enabled)?; + + let code = format!( + r#" + use mock::faucet->mock_faucet + use $kernel::prologue + + begin + exec.prologue::prepare_transaction + + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} + call.mock_faucet::mint + + dropw dropw + end + "#, + FUNGIBLE_ASSET_KEY = vault_key.to_word(), + FUNGIBLE_ASSET_VALUE = asset.to_value_word(), + ); + + TransactionContextBuilder::with_fungible_faucet(faucet_id.into()) + .build()? + .execute_code(&code) + .await?; + + Ok(()) +} + // FUNGIBLE FAUCET BURN TESTS // ================================================================================================ @@ -303,30 +396,29 @@ async fn test_burn_fungible_asset_succeeds() -> anyhow::Result<()> { exec.prologue::prepare_transaction # burn asset - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.mock_faucet::burn - # assert the correct asset is returned - push.{FUNGIBLE_ASSET} - assert_eqw.err="burnt asset does not match expected asset" - # assert the input vault has been updated exec.memory::get_input_vault_root_ptr - push.{ASSET_KEY} + push.{FUNGIBLE_ASSET_KEY} exec.asset_vault::get_asset - # => [ASSET] + # => [ASSET_VALUE] # extract balance from asset - drop drop drop + movdn.3 drop drop drop # => [balance] push.{final_input_vault_asset_amount} assert_eq.err="vault balance does not match expected balance" + + exec.::miden::core::sys::truncate_stack end "#, - FUNGIBLE_ASSET = Word::from(asset), - ASSET_KEY = asset.vault_key(), + FUNGIBLE_ASSET_VALUE = asset.to_value_word(), + FUNGIBLE_ASSET_KEY = asset.to_key_word(), final_input_vault_asset_amount 
= CONSUMED_ASSET_1_AMOUNT - FUNGIBLE_ASSET_AMOUNT, ); @@ -339,17 +431,20 @@ async fn test_burn_fungible_asset_succeeds() -> anyhow::Result<()> { #[tokio::test] async fn burn_fungible_asset_fails_on_non_faucet_account() -> anyhow::Result<()> { let account = setup_non_faucet_account()?; + let asset = FungibleAsset::mock(50); let code = format!( " use mock::faucet begin - push.{asset} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.faucet::burn end ", - asset = Word::from(FungibleAsset::mock(50)) + FUNGIBLE_ASSET_VALUE = asset.to_value_word(), + FUNGIBLE_ASSET_KEY = asset.to_key_word(), ); let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(code)?; @@ -370,6 +465,7 @@ async fn test_burn_fungible_asset_inconsistent_faucet_id() -> anyhow::Result<()> .build()?; let faucet_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1).unwrap(); + let fungible_asset = FungibleAsset::new(faucet_id, FUNGIBLE_ASSET_AMOUNT)?; let code = format!( " @@ -378,12 +474,13 @@ async fn test_burn_fungible_asset_inconsistent_faucet_id() -> anyhow::Result<()> begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET_AMOUNT} push.0 push.{suffix} push.{prefix} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.faucet::burn end ", - prefix = faucet_id.prefix().as_felt(), - suffix = faucet_id.suffix(), + FUNGIBLE_ASSET_VALUE = fungible_asset.to_value_word(), + FUNGIBLE_ASSET_KEY = fungible_asset.to_key_word(), ); let exec_output = tx_context.execute_code(&code).await; @@ -399,6 +496,7 @@ async fn test_burn_fungible_asset_insufficient_input_amount() -> anyhow::Result< .build()?; let faucet_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1).unwrap(); + let fungible_asset = FungibleAsset::new(faucet_id, CONSUMED_ASSET_1_AMOUNT + 1)?; let code = format!( " @@ -407,13 +505,13 @@ async fn test_burn_fungible_asset_insufficient_input_amount() -> anyhow::Result< begin exec.prologue::prepare_transaction - push.{saturating_amount} push.0 
push.{suffix} push.{prefix} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.faucet::burn end ", - prefix = faucet_id.prefix().as_felt(), - suffix = faucet_id.suffix(), - saturating_amount = CONSUMED_ASSET_1_AMOUNT + 1 + FUNGIBLE_ASSET_VALUE = fungible_asset.to_value_word(), + FUNGIBLE_ASSET_KEY = fungible_asset.to_key_word(), ); let exec_output = tx_context.execute_code(&code).await; @@ -447,27 +545,27 @@ async fn test_burn_non_fungible_asset_succeeds() -> anyhow::Result<()> { exec.prologue::prepare_transaction # add non-fungible asset to the vault - exec.memory::get_input_vault_root_ptr push.{non_fungible_asset} + exec.memory::get_input_vault_root_ptr + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} exec.asset_vault::add_non_fungible_asset dropw # check that the non-fungible asset is presented in the input vault exec.memory::get_input_vault_root_ptr - push.{ASSET_KEY} + push.{NON_FUNGIBLE_ASSET_KEY} exec.asset_vault::get_asset - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} assert_eqw.err="input vault should contain the asset" # burn the non-fungible asset - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} call.mock_faucet::burn - - # assert the correct asset is returned - push.{non_fungible_asset} - assert_eqw.err="burnt asset does not match expected asset" + dropw # assert the input vault has been updated and does not have the burnt asset exec.memory::get_input_vault_root_ptr - push.{ASSET_KEY} + push.{NON_FUNGIBLE_ASSET_KEY} exec.asset_vault::get_asset # the returned word should be empty, indicating the asset is absent padw assert_eqw.err="input vault should not contain burned asset" @@ -475,8 +573,8 @@ async fn test_burn_non_fungible_asset_succeeds() -> anyhow::Result<()> { dropw end "#, - ASSET_KEY = non_fungible_asset_burnt.vault_key(), - non_fungible_asset = Word::from(non_fungible_asset_burnt), + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset_burnt.to_key_word(), + 
NON_FUNGIBLE_ASSET_VALUE = non_fungible_asset_burnt.to_value_word(), ); tx_context.execute_code(&code).await?; @@ -499,11 +597,13 @@ async fn test_burn_non_fungible_asset_fails_does_not_exist() -> anyhow::Result<( begin # burn asset exec.prologue::prepare_transaction - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} call.faucet::burn end ", - non_fungible_asset = Word::from(non_fungible_asset_burnt) + NON_FUNGIBLE_ASSET_VALUE = non_fungible_asset_burnt.to_value_word(), + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset_burnt.to_key_word(), ); let exec_output = tx_context.execute_code(&code).await; @@ -516,17 +616,20 @@ async fn test_burn_non_fungible_asset_fails_does_not_exist() -> anyhow::Result<( #[tokio::test] async fn burn_non_fungible_asset_fails_on_non_faucet_account() -> anyhow::Result<()> { let account = setup_non_faucet_account()?; + let asset = FungibleAsset::mock(50); let code = format!( " use mock::faucet begin - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.faucet::burn end ", - asset = Word::from(FungibleAsset::mock(50)) + ASSET_VALUE = asset.to_value_word(), + ASSET_KEY = asset.to_key_word(), ); let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(code)?; @@ -558,11 +661,13 @@ async fn test_burn_non_fungible_asset_fails_inconsistent_faucet_id() -> anyhow:: begin # burn asset exec.prologue::prepare_transaction - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} call.faucet::burn end ", - non_fungible_asset = Word::from(non_fungible_asset_burnt) + NON_FUNGIBLE_ASSET_VALUE = non_fungible_asset_burnt.to_value_word(), + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset_burnt.to_key_word(), ); let exec_output = tx_context.execute_code(&code).await; @@ -589,8 +694,10 @@ fn setup_non_faucet_account() -> anyhow::Result { "pub use ::miden::protocol::faucet::mint pub use ::miden::protocol::faucet::burn", )?; - let metadata = 
AccountComponentMetadata::new("test::non_faucet_component") - .with_supported_type(AccountType::RegularAccountUpdatableCode); + let metadata = AccountComponentMetadata::new( + "test::non_faucet_component", + [AccountType::RegularAccountUpdatableCode], + ); let faucet_component = AccountComponent::new(faucet_code, vec![], metadata)?; Ok(AccountBuilder::new([4; 32]) .account_type(AccountType::RegularAccountUpdatableCode) diff --git a/crates/miden-testing/src/kernel_tests/tx/test_fee.rs b/crates/miden-testing/src/kernel_tests/tx/test_fee.rs index 0a865d1098..5b5070a521 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_fee.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_fee.rs @@ -1,13 +1,13 @@ use anyhow::Context; use assert_matches::assert_matches; -use miden_protocol::account::{AccountId, StorageMap, StorageSlot, StorageSlotName}; +use miden_crypto::rand::test_utils::rand_value; +use miden_protocol::account::{AccountId, StorageMap, StorageMapKey, StorageSlot, StorageSlotName}; use miden_protocol::asset::{Asset, FungibleAsset, NonFungibleAsset}; use miden_protocol::note::NoteType; use miden_protocol::testing::account_id::ACCOUNT_ID_NATIVE_ASSET_FAUCET; -use miden_protocol::transaction::{ExecutedTransaction, OutputNote}; +use miden_protocol::transaction::{ExecutedTransaction, RawOutputNote}; use miden_protocol::{self, Felt, Word}; use miden_tx::TransactionExecutorError; -use winter_rand_utils::rand_value; use crate::utils::create_public_p2any_note; use crate::{Auth, MockChain}; @@ -33,7 +33,7 @@ async fn create_account_with_fees() -> anyhow::Result<()> { .context("failed to execute account-creating transaction")?; let expected_fee = tx.compute_fee(); - assert_eq!(expected_fee, tx.fee().amount()); + assert_eq!(expected_fee, tx.fee().amount().inner()); // We expect that the new account contains the note_amount minus the paid fee. 
let added_asset = FungibleAsset::new(chain.native_asset_id(), note_amount)?.sub(tx.fee())?; @@ -99,7 +99,7 @@ async fn num_tx_cycles_after_compute_fee_are_less_than_estimated( // These constants should always be updated together with the equivalent constants in // epilogue.masm. const SMT_SET_ADDITIONAL_CYCLES: usize = 250; - const NUM_POST_COMPUTE_FEE_CYCLES: usize = 500; + const NUM_POST_COMPUTE_FEE_CYCLES: usize = 608; assert!( tx.measurements().after_tx_cycles_obtained @@ -135,7 +135,7 @@ async fn mutate_account_with_storage() -> anyhow::Result { StorageSlot::with_value(StorageSlotName::mock(0), rand_value()), StorageSlot::with_map( StorageSlotName::mock(1), - StorageMap::with_entries([(rand_value(), rand_value())])?, + StorageMap::with_entries([(StorageMapKey::from_raw(rand_value()), rand_value())])?, ), ], [Asset::from(native_asset), NonFungibleAsset::mock(&[1, 2, 3, 4])], @@ -166,7 +166,7 @@ async fn create_output_notes() -> anyhow::Result { [ StorageSlot::with_map( StorageSlotName::mock(0), - StorageMap::with_entries([(rand_value(), rand_value())])?, + StorageMap::with_entries([(StorageMapKey::from_raw(rand_value()), rand_value())])?, ), StorageSlot::with_value(StorageSlotName::mock(1), rand_value()), ], @@ -178,7 +178,7 @@ async fn create_output_notes() -> anyhow::Result { // This creates a note that adds the given assets to the account vault. let asset_note = create_public_p2any_note(account.id(), [Asset::from(note_asset0.add(note_asset1)?)]); - builder.add_output_note(OutputNote::Full(asset_note.clone())); + builder.add_output_note(RawOutputNote::Full(asset_note.clone())); let output_note0 = create_public_p2any_note(account.id(), [note_asset0.into()]); let output_note1 = create_public_p2any_note(account.id(), [note_asset1.into()]); @@ -188,8 +188,8 @@ async fn create_output_notes() -> anyhow::Result { .build()? .build_tx_context(account, &[asset_note.id(), spawn_note.id()], &[])? 
.extend_expected_output_notes(vec![ - OutputNote::Full(output_note0), - OutputNote::Full(output_note1), + RawOutputNote::Full(output_note0), + RawOutputNote::Full(output_note1), ]) .build()? .execute() diff --git a/crates/miden-testing/src/kernel_tests/tx/test_fpi.rs b/crates/miden-testing/src/kernel_tests/tx/test_fpi.rs index b6ee630f85..9ae7d70fbd 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_fpi.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_fpi.rs @@ -2,8 +2,8 @@ use alloc::sync::Arc; use alloc::vec; use alloc::vec::Vec; -use miden_processor::fast::ExecutionOutput; -use miden_processor::{AdviceInputs, EMPTY_WORD, Felt}; +use miden_processor::advice::AdviceInputs; +use miden_processor::{EMPTY_WORD, ExecutionOutput, Felt}; use miden_protocol::account::component::AccountComponentMetadata; use miden_protocol::account::{ Account, @@ -43,7 +43,7 @@ use miden_protocol::transaction::memory::{ UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR, UPCOMING_FOREIGN_PROCEDURE_PTR, }; -use miden_protocol::{FieldElement, Word, ZERO}; +use miden_protocol::{Word, ZERO}; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::account_component::MockAccountComponent; use miden_tx::LocalTransactionProver; @@ -150,9 +150,9 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { push.{get_item_foreign_root} # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure # => [STORAGE_VALUE_1] @@ -169,7 +169,7 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_word_be(0), + 
exec_output.get_stack_word(0), mock_value_slot0.content().value(), "Value at the top of the stack should be equal to [1, 2, 3, 4]", ); @@ -208,9 +208,9 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { push.{get_map_item_foreign_root} # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, MAP_KEY, pad(4)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, MAP_KEY, pad(4)] exec.tx::execute_foreign_procedure # => [MAP_VALUE] @@ -228,7 +228,7 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_word_be(0), + exec_output.get_stack_word(0), STORAGE_LEAVES_2[0].1, "Value at the top of the stack should be equal [1, 2, 3, 4]", ); @@ -265,9 +265,9 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { push.{get_item_foreign_hash} # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure dropw # => [] @@ -284,9 +284,9 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { push.{get_item_foreign_hash} # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # 
slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure @@ -430,9 +430,9 @@ async fn test_fpi_memory_two_accounts() -> anyhow::Result<()> { push.{get_item_foreign_1_hash} # push the foreign account ID - push.{foreign_1_suffix} push.{foreign_1_prefix} - # => [foreign_account_1_id_prefix, foreign_account_1_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + push.{foreign_1_prefix} push.{foreign_1_suffix} + # => [foreign_account_1_id_suffix, foreign_account_1_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure dropw # => [] @@ -449,9 +449,9 @@ async fn test_fpi_memory_two_accounts() -> anyhow::Result<()> { push.{get_item_foreign_2_hash} # push the foreign account ID - push.{foreign_2_suffix} push.{foreign_2_prefix} - # => [foreign_account_2_id_prefix, foreign_account_2_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + push.{foreign_2_prefix} push.{foreign_2_suffix} + # => [foreign_account_2_id_suffix, foreign_account_2_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure dropw # => [] @@ -468,9 +468,9 @@ async fn test_fpi_memory_two_accounts() -> anyhow::Result<()> { push.{get_item_foreign_1_hash} # push the foreign account ID - push.{foreign_1_suffix} push.{foreign_1_prefix} - # => [foreign_account_1_id_prefix, foreign_account_1_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + push.{foreign_1_prefix} push.{foreign_1_suffix} + # => [foreign_account_1_id_suffix, foreign_account_1_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure @@ -543,7 +543,7 @@ async fn test_fpi_execute_foreign_procedure() -> anyhow::Result<()> { #! Gets an item from the active account storage. #! - #! Inputs: [slot_id_prefix, slot_id_suffix] + #! Inputs: [slot_id_suffix, slot_id_prefix] #! 
Outputs: [VALUE] pub proc get_item_foreign # make this foreign procedure unique to make sure that we invoke the procedure of the @@ -557,7 +557,7 @@ async fn test_fpi_execute_foreign_procedure() -> anyhow::Result<()> { #! Gets a map item from the active account storage. #! - #! Inputs: [slot_id_prefix, slot_id_suffix, KEY] + #! Inputs: [slot_id_suffix, slot_id_prefix, KEY] #! Outputs: [VALUE] pub proc get_map_item_foreign # make this foreign procedure unique to make sure that we invoke the procedure of the @@ -625,22 +625,22 @@ async fn test_fpi_execute_foreign_procedure() -> anyhow::Result<()> { # push the slot name of desired storage item push.MOCK_VALUE_SLOT0[0..2] - # => [slot_id_prefix, slot_id_suffix, pad(16)] + # => [slot_id_suffix, slot_id_prefix, pad(16)] # get the hash of the `get_item_foreign` account procedure procref.::foreign_account::get_item_foreign - # => [FOREIGN_PROC_ROOT, slot_id_prefix, slot_id_suffix, pad(16)] + # => [FOREIGN_PROC_ROOT, slot_id_suffix, slot_id_prefix, pad(16)] # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT - # slot_id_prefix, slot_id_suffix, pad(16)]] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT + # slot_id_suffix, slot_id_prefix, pad(16)]] exec.tx::execute_foreign_procedure # => [STORAGE_VALUE, pad(14)] # assert the correctness of the obtained value - push.1.2.3.4 assert_eqw.err="foreign proc returned unexpected value" + push.{mock_value0} assert_eqw.err="foreign proc returned unexpected value (1)" # => [pad(16)] ### get the storage map item ###################################### @@ -655,15 +655,15 @@ async fn test_fpi_execute_foreign_procedure() -> anyhow::Result<()> { procref.::foreign_account::get_map_item_foreign # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, 
foreign_account_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, MAP_ITEM_KEY, pad(16)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, MAP_ITEM_KEY, pad(16)] exec.tx::execute_foreign_procedure # => [MAP_VALUE, pad(18)] # assert the correctness of the obtained value - push.1.2.3.4 assert_eqw.err="foreign proc returned unexpected value" + push.{mock_value0} assert_eqw.err="foreign proc returned unexpected value (2)" # => [pad(18)] ### assert foreign procedure inputs correctness ################### @@ -682,8 +682,8 @@ async fn test_fpi_execute_foreign_procedure() -> anyhow::Result<()> { # => [FOREIGN_PROC_ROOT, [1, 2, ..., 16], pad(16)] # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, # [1, 2, ..., 16], pad(18)] exec.tx::execute_foreign_procedure @@ -703,6 +703,7 @@ async fn test_fpi_execute_foreign_procedure() -> anyhow::Result<()> { end "#, mock_value_slot0 = mock_value_slot0.name(), + mock_value0 = mock_value_slot0.value(), mock_map_slot = mock_map_slot.name(), foreign_prefix = foreign_account.id().prefix().as_felt(), foreign_suffix = foreign_account.id().suffix(), @@ -740,7 +741,7 @@ async fn foreign_account_can_get_balance_and_presence_of_asset() -> anyhow::Resu // Create two different assets. 
let fungible_asset = Asset::Fungible(FungibleAsset::new(fungible_faucet_id, 1)?); let non_fungible_asset = Asset::NonFungible(NonFungibleAsset::new( - &NonFungibleAssetDetails::new(non_fungible_faucet_id.prefix(), vec![1, 2, 3])?, + &NonFungibleAssetDetails::new(non_fungible_faucet_id, vec![1, 2, 3])?, )?); let foreign_account_code_source = format!( @@ -749,12 +750,12 @@ async fn foreign_account_can_get_balance_and_presence_of_asset() -> anyhow::Resu pub proc get_asset_balance # get balance of first asset - push.{fungible_faucet_id_suffix} push.{fungible_faucet_id_prefix} + push.{fungible_faucet_id_prefix} push.{fungible_faucet_id_suffix} exec.active_account::get_balance # => [balance] # check presence of non fungible asset - push.{non_fungible_asset_word} + push.{NON_FUNGIBLE_ASSET_KEY} exec.active_account::has_non_fungible_asset # => [has_asset, balance] @@ -769,7 +770,7 @@ async fn foreign_account_can_get_balance_and_presence_of_asset() -> anyhow::Resu ", fungible_faucet_id_prefix = fungible_faucet_id.prefix().as_felt(), fungible_faucet_id_suffix = fungible_faucet_id.suffix(), - non_fungible_asset_word = Word::from(non_fungible_asset), + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset.to_key_word(), ); let source_manager = Arc::new(DefaultSourceManager::default()); @@ -813,8 +814,8 @@ async fn foreign_account_can_get_balance_and_presence_of_asset() -> anyhow::Resu procref.::foreign_account_code::get_asset_balance # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, pad(15)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, pad(15)] exec.tx::execute_foreign_procedure # => [has_asset_balance] @@ -862,7 +863,7 @@ async fn foreign_account_get_initial_balance() -> anyhow::Result<()> { pub proc get_initial_balance # push the faucet ID on the stack - push.{fungible_faucet_id_suffix} 
push.{fungible_faucet_id_prefix} + push.{fungible_faucet_id_prefix} push.{fungible_faucet_id_suffix} # get the initial balance of the asset associated with the provided faucet ID exec.active_account::get_balance @@ -919,8 +920,8 @@ async fn foreign_account_get_initial_balance() -> anyhow::Result<()> { procref.::foreign_account_code::get_initial_balance # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, pad(15)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, pad(15)] exec.tx::execute_foreign_procedure # => [init_foreign_balance] @@ -991,12 +992,12 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { push.MOCK_VALUE_SLOT1[0..2] # get the hash of the `get_item_foreign` account procedure from the advice stack - adv_push.4 + padw adv_loadw # push the foreign account ID from the advice stack adv_push.2 - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1007,7 +1008,7 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { # get the first element of the value0 storage slot (it should be 1) and add it to the # obtained foreign value. 
push.MOCK_VALUE_SLOT0[0..2] exec.active_account::get_item - drop drop drop + swap.3 drop drop drop add # assert that the resulting value equals 6 @@ -1051,11 +1052,11 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { # => [pad(15)] # get the hash of the `second_account_foreign_proc` account procedure from the advice stack - adv_push.4 + padw adv_loadw # push the ID of the second foreign account from the advice stack adv_push.2 - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1063,7 +1064,7 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { # get the second element of the value0 storage slot (it should be 2) and add it to the # obtained foreign value. push.MOCK_VALUE_SLOT0[0..2] exec.active_account::get_item - drop drop swap drop + drop swap.2 drop drop add # assert that the resulting value equals 8 @@ -1079,7 +1080,7 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { exec.active_account::get_item # return the first element of the resulting word - drop drop drop + swap.3 drop drop drop end "#, mock_value_slot0 = mock_value_slot0.name(), @@ -1127,16 +1128,16 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { .stack .extend(*second_foreign_account.code().procedures()[1].mast_root()); advice_inputs.stack.extend([ - second_foreign_account.id().suffix(), second_foreign_account.id().prefix().as_felt(), + second_foreign_account.id().suffix(), ]); advice_inputs .stack .extend(*first_foreign_account.code().procedures()[2].mast_root()); advice_inputs.stack.extend([ - first_foreign_account.id().suffix(), first_foreign_account.id().prefix().as_felt(), + first_foreign_account.id().suffix(), ]); let code = format!( @@ -1153,8 +1154,8 @@ async fn 
test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { procref.::first_foreign_account::first_account_foreign_proc # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1244,8 +1245,8 @@ async fn test_prove_fpi_two_foreign_accounts_chain() -> anyhow::Result<()> { procref.::foreign_account::second_account_foreign_proc # push the ID of the second foreign account - push.{second_foreign_suffix} push.{second_foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, pad(15)] + push.{second_foreign_prefix} push.{second_foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, pad(15)] # call the second foreign account exec.tx::execute_foreign_procedure @@ -1314,8 +1315,8 @@ async fn test_prove_fpi_two_foreign_accounts_chain() -> anyhow::Result<()> { procref.::first_foreign_account::first_account_foreign_proc # push the first foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, pad(15)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, pad(15)] exec.tx::execute_foreign_procedure # => [result_from_second] @@ -1346,7 +1347,7 @@ async fn test_prove_fpi_two_foreign_accounts_chain() -> anyhow::Result<()> { .await?; // Prove the executed transaction which uses FPI across two foreign accounts. 
- LocalTransactionProver::default().prove(executed_transaction)?; + LocalTransactionProver::default().prove(executed_transaction).await?; Ok(()) } @@ -1421,8 +1422,8 @@ async fn test_nested_fpi_stack_overflow() -> anyhow::Result<()> { push.{next_account_proc_hash} # push the foreign account ID - push.{next_foreign_suffix} push.{next_foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + push.{next_foreign_prefix} push.{next_foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1498,8 +1499,8 @@ async fn test_nested_fpi_stack_overflow() -> anyhow::Result<()> { push.{foreign_account_proc_hash} # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1542,11 +1543,11 @@ async fn test_nested_fpi_native_account_invocation() -> anyhow::Result<()> { # => [pad(15)] # get the hash of the native account procedure from the advice stack - adv_push.4 + padw adv_loadw # push the ID of the native account from the advice stack adv_push.2 - # => [native_account_id_prefix, native_account_id_suffix, NATIVE_PROC_ROOT, pad(15)] + # => [native_account_id_suffix, native_account_id_prefix, NATIVE_PROC_ROOT, pad(15)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1594,8 +1595,8 @@ async fn test_nested_fpi_native_account_invocation() -> anyhow::Result<()> { push.{first_account_foreign_proc_hash} # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, 
FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1622,7 +1623,7 @@ async fn test_nested_fpi_native_account_invocation() -> anyhow::Result<()> { advice_inputs.stack.extend(*native_account.code().procedures()[3].mast_root()); advice_inputs .stack - .extend([native_account.id().suffix(), native_account.id().prefix().as_felt()]); + .extend([native_account.id().prefix().as_felt(), native_account.id().suffix()]); let result = mock_chain .build_tx_context(native_account.id(), &[], &[]) @@ -1723,8 +1724,8 @@ async fn test_fpi_stale_account() -> anyhow::Result<()> { # => [FOREIGN_PROC_ROOT, pad(16)] # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, pad(16)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, pad(16)] exec.tx::execute_foreign_procedure end @@ -1750,15 +1751,15 @@ async fn test_fpi_get_account_id() -> anyhow::Result<()> { pub proc get_current_and_native_ids # get the ID of the current (foreign) account exec.active_account::get_id - # => [acct_id_prefix, acct_id_suffix, pad(16)] + # => [acct_id_suffix, acct_id_prefix, pad(16)] # get the ID of the native account exec.native_account::get_id - # => [native_acct_id_prefix, native_acct_id_suffix, acct_id_prefix, acct_id_suffix, pad(16)] + # => [native_acct_id_suffix, native_acct_id_prefix, acct_id_suffix, acct_id_prefix, pad(16)] # truncate the stack swapw dropw - # => [native_acct_id_prefix, native_acct_id_suffix, acct_id_prefix, acct_id_suffix, pad(12)] + # => [native_acct_id_suffix, native_acct_id_prefix, acct_id_suffix, acct_id_prefix, pad(12)] end "; @@ -1802,22 +1803,22 @@ async fn test_fpi_get_account_id() -> 
anyhow::Result<()> { procref.::foreign_account::get_current_and_native_ids # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, pad(15)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, pad(15)] exec.tx::execute_foreign_procedure - # => [native_acct_id_prefix, native_acct_id_suffix, acct_id_prefix, acct_id_suffix] + # => [native_acct_id_suffix, native_acct_id_prefix, acct_id_suffix, acct_id_prefix] # push the expected native account ID and check that it is equal to the one returned # from the FPI - push.{expected_native_suffix} push.{expected_native_prefix} + push.{expected_native_prefix} push.{expected_native_suffix} exec.account_id::is_equal assert.err="native account ID returned from the FPI is not equal to the expected one" - # => [acct_id_prefix, acct_id_suffix] + # => [acct_id_suffix, acct_id_prefix] # push the expected foreign account ID and check that it is equal to the one returned # from the FPI - push.{foreign_suffix} push.{foreign_prefix} + push.{foreign_prefix} push.{foreign_suffix} exec.account_id::is_equal assert.err="foreign account ID returned from the FPI is not equal to the expected one" # => [] @@ -1921,7 +1922,7 @@ async fn test_get_initial_item_and_get_initial_map_item_with_foreign_account() - padw padw padw push.0.0.0 # => [pad(15)] procref.::foreign_account::test_get_initial_item - push.{foreign_account_id_suffix} push.{foreign_account_id_prefix} + push.{foreign_account_id_prefix} push.{foreign_account_id_suffix} exec.tx::execute_foreign_procedure push.{expected_value_slot_0} assert_eqw.err="foreign account get_initial_item should work" @@ -1931,7 +1932,7 @@ async fn test_get_initial_item_and_get_initial_map_item_with_foreign_account() - push.{map_key} push.MOCK_MAP_SLOT[0..2] procref.::foreign_account::test_get_initial_map_item - push.{foreign_account_id_suffix} 
push.{foreign_account_id_prefix} + push.{foreign_account_id_prefix} push.{foreign_account_id_suffix} exec.tx::execute_foreign_procedure push.{map_value} assert_eqw.err="foreign account get_initial_map_item should work" diff --git a/crates/miden-testing/src/kernel_tests/tx/test_input_note.rs b/crates/miden-testing/src/kernel_tests/tx/test_input_note.rs index aaf943ad8c..51d746748c 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_input_note.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_input_note.rs @@ -2,6 +2,7 @@ use alloc::string::String; use miden_protocol::Word; use miden_protocol::note::Note; +use miden_protocol::transaction::memory::{ASSET_SIZE, ASSET_VALUE_OFFSET}; use miden_standards::code_builder::CodeBuilder; use super::{TestSetup, setup_test}; @@ -168,16 +169,16 @@ async fn test_get_sender() -> anyhow::Result<()> { # get the sender from the input note push.0 exec.input_note::get_sender - # => [sender_id_prefix, sender_id_suffix] - - # assert the correctness of the prefix - push.{sender_prefix} - assert_eq.err="sender id prefix of the note 0 is incorrect" - # => [sender_id_suffix] + # => [sender_id_suffix, sender_id_prefix] # assert the correctness of the suffix push.{sender_suffix} assert_eq.err="sender id suffix of the note 0 is incorrect" + # => [sender_id_prefix] + + # assert the correctness of the prefix + push.{sender_prefix} + assert_eq.err="sender id prefix of the note 0 is incorrect" # => [] end "#, @@ -234,20 +235,32 @@ async fn test_get_assets() -> anyhow::Result<()> { for (asset_index, asset) in note.assets().iter().enumerate() { check_assets_code.push_str(&format!( r#" - # load the asset stored in memory - padw dup.4 mem_loadw_be - # => [STORED_ASSET, dest_ptr, note_index] + # load the asset key stored in memory + padw dup.4 mem_loadw_le + # => [STORED_ASSET_KEY, dest_ptr, note_index] + + # assert the asset key matches + push.{NOTE_ASSET_KEY} + assert_eqw.err="expected asset key at asset index {asset_index} of the note\ + 
{note_index} to be {NOTE_ASSET_KEY}" + # => [dest_ptr, note_index] + + # load the asset value stored in memory + padw dup.4 add.{ASSET_VALUE_OFFSET} mem_loadw_le + # => [STORED_ASSET_VALUE, dest_ptr, note_index] - # assert the asset - push.{NOTE_ASSET} - assert_eqw.err="asset {asset_index} of the note {note_index} is incorrect" + # assert the asset value matches + push.{NOTE_ASSET_VALUE} + assert_eqw.err="expected asset value at asset index {asset_index} of the note\ + {note_index} to be {NOTE_ASSET_VALUE}" # => [dest_ptr, note_index] # move the pointer - add.4 - # => [dest_ptr+4, note_index] + add.{ASSET_SIZE} + # => [dest_ptr+ASSET_SIZE, note_index] "#, - NOTE_ASSET = Word::from(*asset), + NOTE_ASSET_KEY = asset.to_key_word(), + NOTE_ASSET_VALUE = asset.to_value_word(), asset_index = asset_index, note_index = note_index, )); @@ -272,8 +285,8 @@ async fn test_get_assets() -> anyhow::Result<()> { end ", check_note_0 = check_assets_code(0, 0, &p2id_note_0_assets), - check_note_1 = check_assets_code(1, 4, &p2id_note_1_asset), - check_note_2 = check_assets_code(2, 8, &p2id_note_2_assets), + check_note_1 = check_assets_code(1, 8, &p2id_note_1_asset), + check_note_2 = check_assets_code(2, 16, &p2id_note_2_assets), ); let tx_script = CodeBuilder::default().compile_tx_script(code)?; diff --git a/crates/miden-testing/src/kernel_tests/tx/test_lazy_loading.rs b/crates/miden-testing/src/kernel_tests/tx/test_lazy_loading.rs index cf0216fa6d..b30c909220 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_lazy_loading.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_lazy_loading.rs @@ -3,7 +3,7 @@ //! Once lazy loading is enabled generally, it can be removed and/or integrated into other tests. 
use miden_protocol::LexicographicWord; -use miden_protocol::account::{AccountId, AccountStorage, StorageSlotDelta}; +use miden_protocol::account::{AccountId, AccountStorage, StorageMapKey, StorageSlotDelta}; use miden_protocol::asset::{Asset, FungibleAsset}; use miden_protocol::testing::account_id::{ ACCOUNT_ID_NATIVE_ASSET_FAUCET, @@ -43,15 +43,19 @@ async fn adding_fungible_assets_with_lazy_loading_succeeds() -> anyhow::Result<( use mock::account begin - push.{FUNGIBLE_ASSET1} - call.account::add_asset dropw + push.{FUNGIBLE_ASSET_VALUE1} + push.{FUNGIBLE_ASSET_KEY1} + call.account::add_asset dropw dropw - push.{FUNGIBLE_ASSET2} - call.account::add_asset dropw + push.{FUNGIBLE_ASSET_VALUE2} + push.{FUNGIBLE_ASSET_KEY2} + call.account::add_asset dropw dropw end ", - FUNGIBLE_ASSET1 = Word::from(fungible_asset1), - FUNGIBLE_ASSET2 = Word::from(fungible_asset2) + FUNGIBLE_ASSET_KEY1 = fungible_asset1.to_key_word(), + FUNGIBLE_ASSET_VALUE1 = fungible_asset1.to_value_word(), + FUNGIBLE_ASSET_KEY2 = fungible_asset2.to_key_word(), + FUNGIBLE_ASSET_VALUE2 = fungible_asset2.to_value_word() ); let builder = CodeBuilder::with_mock_libraries(); @@ -91,25 +95,37 @@ async fn removing_fungible_assets_with_lazy_loading_succeeds() -> anyhow::Result use mock::util begin - push.{FUNGIBLE_ASSET1} + push.{FUNGIBLE_ASSET1_VALUE} + push.{FUNGIBLE_ASSET1_KEY} call.account::remove_asset + # drop the excess words from the call + dropw dropw # => [] # move asset to note to adhere to asset preservation rules + push.{FUNGIBLE_ASSET1_VALUE} + push.{FUNGIBLE_ASSET1_KEY} exec.util::create_default_note_with_asset # => [] - push.{FUNGIBLE_ASSET2} + push.{FUNGIBLE_ASSET2_VALUE} + push.{FUNGIBLE_ASSET2_KEY} call.account::remove_asset - # => [ASSET] + # drop the excess words from the call + dropw dropw + # => [] # move asset to note to adhere to asset preservation rules + push.{FUNGIBLE_ASSET2_VALUE} + push.{FUNGIBLE_ASSET2_KEY} exec.util::create_default_note_with_asset # => [] end ", - 
FUNGIBLE_ASSET1 = Word::from(fungible_asset1), - FUNGIBLE_ASSET2 = Word::from(fungible_asset2) + FUNGIBLE_ASSET1_KEY = fungible_asset1.to_key_word(), + FUNGIBLE_ASSET1_VALUE = fungible_asset1.to_value_word(), + FUNGIBLE_ASSET2_KEY = fungible_asset2.to_key_word(), + FUNGIBLE_ASSET2_VALUE = fungible_asset2.to_value_word(), ); let builder = CodeBuilder::with_mock_libraries(); @@ -171,9 +187,9 @@ async fn setting_map_item_with_lazy_loading_succeeds() -> anyhow::Result<()> { let mock_map = AccountStorage::mock_map(); let existing_key = *mock_map.entries().next().unwrap().0; - let non_existent_key = Word::from([5, 5, 5, 5u32]); + let non_existent_key = StorageMapKey::from_array([5, 5, 5, 5u32]); assert!( - mock_map.open(&non_existent_key).get(&non_existent_key).unwrap() == Word::empty(), + mock_map.open(&non_existent_key).get(non_existent_key).unwrap() == Word::empty(), "test setup requires that the non existent key does not exist" ); @@ -194,14 +210,14 @@ async fn setting_map_item_with_lazy_loading_succeeds() -> anyhow::Result<()> { push.{value0} push.{existing_key} push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] call.account::set_map_item # Insert a non-existent key. 
push.{value1} push.{non_existent_key} push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] call.account::set_map_item exec.::miden::core::sys::truncate_stack @@ -243,9 +259,9 @@ async fn getting_map_item_with_lazy_loading_succeeds() -> anyhow::Result<()> { let mock_map = AccountStorage::mock_map(); let (existing_key, existing_value) = mock_map.entries().next().unwrap(); - let non_existent_key = Word::from([5, 5, 5, 5u32]); + let non_existent_key = StorageMapKey::from_array([5, 5, 5, 5u32]); assert!( - mock_map.open(&non_existent_key).get(&non_existent_key).unwrap() == Word::empty(), + mock_map.open(&non_existent_key).get(non_existent_key).unwrap() == Word::empty(), "test setup requires that the non existent key does not exist" ); @@ -262,7 +278,7 @@ async fn getting_map_item_with_lazy_loading_succeeds() -> anyhow::Result<()> { # Fetch value from existing key. push.{existing_key} push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY] + # => [slot_id_suffix, slot_id_prefix, KEY] call.account::get_map_item push.{existing_value} @@ -271,7 +287,7 @@ async fn getting_map_item_with_lazy_loading_succeeds() -> anyhow::Result<()> { # Fetch a non-existent key. 
push.{non_existent_key} push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY] + # => [slot_id_suffix, slot_id_prefix, KEY] call.account::get_map_item padw assert_eqw.err="non-existent value should be the empty word" diff --git a/crates/miden-testing/src/kernel_tests/tx/test_link_map.rs b/crates/miden-testing/src/kernel_tests/tx/test_link_map.rs index fca31cf22a..58984af5df 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_link_map.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_link_map.rs @@ -3,11 +3,11 @@ use std::collections::BTreeMap; use std::string::String; use anyhow::Context; +use miden_crypto::rand::test_utils::rand_value; use miden_processor::{ONE, ZERO}; -use miden_protocol::{EMPTY_WORD, LexicographicWord, Word}; +use miden_protocol::{EMPTY_WORD, Felt, LexicographicWord, Word}; use miden_tx::{LinkMap, MemoryViewer}; use rand::seq::IteratorRandom; -use winter_rand_utils::rand_value; use crate::TransactionContextBuilder; @@ -176,7 +176,7 @@ async fn insertion() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(&code).await.context("failed to execute code")?; let mem_viewer = MemoryViewer::ExecutionOutputs(&exec_output); - let map = LinkMap::new(map_ptr.into(), &mem_viewer); + let map = LinkMap::new(Felt::from(map_ptr), &mem_viewer); let mut map_iter = map.iter(); let entry0 = map_iter.next().expect("map should have four entries"); @@ -546,7 +546,7 @@ async fn execute_link_map_test(operations: Vec) -> anyhow::Result let mem_viewer = MemoryViewer::ExecutionOutputs(&exec_output); for (map_ptr, control_map) in control_maps { - let map = LinkMap::new(map_ptr.into(), &mem_viewer); + let map = LinkMap::new(Felt::from(map_ptr), &mem_viewer); let actual_map_len = map.iter().count(); assert_eq!( actual_map_len, diff --git a/crates/miden-testing/src/kernel_tests/tx/test_note.rs b/crates/miden-testing/src/kernel_tests/tx/test_note.rs index ddd095dc23..4e5f301c2f 100644 --- 
a/crates/miden-testing/src/kernel_tests/tx/test_note.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_note.rs @@ -2,13 +2,13 @@ use alloc::collections::BTreeMap; use alloc::sync::Arc; use anyhow::Context; -use miden_processor::fast::ExecutionOutput; +use miden_processor::ExecutionOutput; use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; use miden_protocol::account::{AccountBuilder, AccountId}; use miden_protocol::assembly::DefaultSourceManager; use miden_protocol::asset::FungibleAsset; -use miden_protocol::crypto::dsa::falcon512_rpo::SecretKey; -use miden_protocol::crypto::rand::{FeltRng, RpoRandomCoin}; +use miden_protocol::crypto::dsa::falcon512_poseidon2::SecretKey; +use miden_protocol::crypto::rand::{FeltRng, RandomCoin}; use miden_protocol::errors::MasmError; use miden_protocol::note::{ Note, @@ -24,8 +24,8 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_SENDER, }; use miden_protocol::transaction::memory::ACTIVE_INPUT_NOTE_PTR; -use miden_protocol::transaction::{OutputNote, TransactionArgs}; -use miden_protocol::{Felt, Word, ZERO}; +use miden_protocol::transaction::{RawOutputNote, TransactionArgs}; +use miden_protocol::{Felt, Word}; use miden_standards::account::wallets::BasicWallet; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::note::NoteBuilder; @@ -46,8 +46,9 @@ use crate::{ async fn test_note_setup() -> anyhow::Result<()> { let tx_context = { let mut builder = MockChain::builder(); - let account = builder - .add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note_1 = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ -70,7 +71,7 @@ async fn test_note_setup() -> anyhow::Result<()> { exec.prologue::prepare_transaction exec.note::prepare_note # => [note_script_root_ptr, NOTE_ARGS, pad(11), pad(16)] - padw movup.4 
mem_loadw_be + padw movup.4 mem_loadw_le # => [SCRIPT_ROOT, NOTE_ARGS, pad(11), pad(16)] # truncate the stack @@ -89,8 +90,9 @@ async fn test_note_setup() -> anyhow::Result<()> { async fn test_note_script_and_note_args() -> anyhow::Result<()> { let mut tx_context = { let mut builder = MockChain::builder(); - let account = builder - .add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note_1 = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ -155,22 +157,21 @@ async fn test_note_script_and_note_args() -> anyhow::Result<()> { tx_context.set_tx_args(tx_args); let exec_output = tx_context.execute_code(code).await.unwrap(); - assert_eq!(exec_output.get_stack_word_be(0), note_args[0]); - assert_eq!(exec_output.get_stack_word_be(4), note_args[1]); + assert_eq!(exec_output.get_stack_word(0), note_args[0]); + assert_eq!(exec_output.get_stack_word(4), note_args[1]); Ok(()) } fn note_setup_stack_assertions(exec_output: &ExecutionOutput, inputs: &TransactionContext) { - let mut expected_stack = [ZERO; 16]; - - // replace the top four elements with the tx script root - let mut note_script_root = *inputs.input_notes().get_note(0).note().script().root(); - note_script_root.reverse(); - expected_stack[..4].copy_from_slice(¬e_script_root); - // assert that the stack contains the note storage at the end of execution - assert_eq!(exec_output.stack.as_slice(), expected_stack.as_slice()) + assert_eq!( + exec_output.get_stack_word(0), + inputs.input_notes().get_note(0).note().script().root() + ); + assert_eq!(exec_output.get_stack_word(4), Word::empty()); + assert_eq!(exec_output.get_stack_word(8), Word::empty()); + assert_eq!(exec_output.get_stack_word(12), Word::empty()); } fn note_setup_memory_assertions(exec_output: &ExecutionOutput) { @@ -201,27 +202,27 @@ async fn test_build_recipient() -> 
anyhow::Result<()> { begin # put the values that will be hashed into the memory - push.{word_1} push.{base_addr} mem_storew_be dropw - push.{word_2} push.{addr_1} mem_storew_be dropw + push.{word_1} push.{base_addr} mem_storew_le dropw + push.{word_2} push.{addr_1} mem_storew_le dropw # Test with 4 values (needs padding to 8) push.{script_root} # SCRIPT_ROOT push.{serial_num} # SERIAL_NUM - push.4.4000 # num_storage_items, storage_ptr + push.4.{base_addr} # num_storage_items, storage_ptr exec.note::build_recipient # => [RECIPIENT_4] # Test with 5 values (needs padding to 8) push.{script_root} # SCRIPT_ROOT push.{serial_num} # SERIAL_NUM - push.5.4000 # num_storage_items, storage_ptr + push.5.{base_addr} # num_storage_items, storage_ptr exec.note::build_recipient # => [RECIPIENT_5, RECIPIENT_4] # Test with 8 values (no padding needed - exactly one rate block) push.{script_root} # SCRIPT_ROOT push.{serial_num} # SERIAL_NUM - push.8.4000 # num_storage_items, storage_ptr + push.8.{base_addr} # num_storage_items, storage_ptr exec.note::build_recipient # => [RECIPIENT_8, RECIPIENT_5, RECIPIENT_4] @@ -256,26 +257,23 @@ async fn test_build_recipient() -> anyhow::Result<()> { let recipient_5 = NoteRecipient::new(serial_num, note_script.clone(), note_storage_5.clone()); let recipient_8 = NoteRecipient::new(serial_num, note_script.clone(), note_storage_8.clone()); - for note_storage in [ + for (note_storage, storage_elements) in [ (note_storage_4, inputs_4.clone()), (note_storage_5, inputs_5.clone()), (note_storage_8, inputs_8.clone()), ] { - let inputs_advice_map_key = note_storage.0.commitment(); + let inputs_advice_map_key = note_storage.commitment(); assert_eq!( exec_output.advice.get_mapped_values(&inputs_advice_map_key).unwrap(), - note_storage.1, + storage_elements, "advice entry with note storage should contain the unpadded values" ); } - let mut expected_stack = alloc::vec::Vec::new(); - expected_stack.extend_from_slice(recipient_4.digest().as_elements()); - 
expected_stack.extend_from_slice(recipient_5.digest().as_elements()); - expected_stack.extend_from_slice(recipient_8.digest().as_elements()); - expected_stack.reverse(); + assert_eq!(exec_output.get_stack_word(0), recipient_8.digest()); + assert_eq!(exec_output.get_stack_word(4), recipient_5.digest()); + assert_eq!(exec_output.get_stack_word(8), recipient_4.digest()); - assert_eq!(exec_output.stack[0..12], expected_stack); Ok(()) } @@ -298,28 +296,28 @@ async fn test_compute_storage_commitment() -> anyhow::Result<()> { begin # put the values that will be hashed into the memory - push.{word_1} push.{base_addr} mem_storew_be dropw - push.{word_2} push.{addr_1} mem_storew_be dropw - push.{word_3} push.{addr_2} mem_storew_be dropw - push.{word_4} push.{addr_3} mem_storew_be dropw + push.{word_1} push.{base_addr} mem_storew_le dropw + push.{word_2} push.{addr_1} mem_storew_le dropw + push.{word_3} push.{addr_2} mem_storew_le dropw + push.{word_4} push.{addr_3} mem_storew_le dropw # push the number of values and pointer to the storage on the stack - push.5.4000 + push.5.{base_addr} # execute the `compute_storage_commitment` procedure for 5 values exec.note::compute_storage_commitment # => [HASH_5] - push.8.4000 + push.8.{base_addr} # execute the `compute_storage_commitment` procedure for 8 values exec.note::compute_storage_commitment # => [HASH_8, HASH_5] - push.15.4000 + push.15.{base_addr} # execute the `compute_storage_commitment` procedure for 15 values exec.note::compute_storage_commitment # => [HASH_15, HASH_8, HASH_5] - push.0.4000 + push.0.{base_addr} # check that calling `compute_storage_commitment` procedure with 0 elements will result in an # empty word exec.note::compute_storage_commitment @@ -355,15 +353,11 @@ async fn test_compute_storage_commitment() -> anyhow::Result<()> { inputs_15.extend_from_slice(&word_4[0..3]); let note_storage_15_hash = NoteStorage::new(inputs_15)?.commitment(); - let mut expected_stack = alloc::vec::Vec::new(); - - 
expected_stack.extend_from_slice(note_storage_5_hash.as_elements()); - expected_stack.extend_from_slice(note_storage_8_hash.as_elements()); - expected_stack.extend_from_slice(note_storage_15_hash.as_elements()); - expected_stack.extend_from_slice(Word::empty().as_elements()); - expected_stack.reverse(); + assert_eq!(exec_output.get_stack_word(0), Word::empty()); + assert_eq!(exec_output.get_stack_word(4), note_storage_15_hash); + assert_eq!(exec_output.get_stack_word(8), note_storage_8_hash); + assert_eq!(exec_output.get_stack_word(12), note_storage_5_hash); - assert_eq!(exec_output.stack[0..16], expected_stack); Ok(()) } @@ -401,7 +395,7 @@ async fn test_build_metadata_header() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(&code).await?; - let metadata_word = exec_output.get_stack_word_be(0); + let metadata_word = exec_output.get_stack_word(0); assert_eq!( test_metadata.to_header_word(), @@ -458,7 +452,7 @@ pub async fn test_timelock() -> anyhow::Result<()> { .dynamically_linked_libraries(CodeBuilder::mock_libraries()) .build()?; - builder.add_output_note(OutputNote::Full(timelock_note.clone())); + builder.add_output_note(RawOutputNote::Full(timelock_note.clone())); let mut mock_chain = builder.build()?; mock_chain @@ -488,7 +482,7 @@ pub async fn test_timelock() -> anyhow::Result<()> { Ok(()) } -/// This test checks the scenario when some public key, which is provided to the RPO component of +/// This test checks the scenario when some public key, which is provided to the auth component of /// the target account, is also provided as an input to the input note. 
/// /// Previously this setup was leading to the values collision in the advice map, see the @@ -497,13 +491,15 @@ pub async fn test_timelock() -> anyhow::Result<()> { async fn test_public_key_as_note_input() -> anyhow::Result<()> { let mut rng = ChaCha20Rng::from_seed(Default::default()); let sec_key = SecretKey::with_rng(&mut rng); - // this value will be used both as public key in the RPO component of the target account and as + // this value will be used both as public key in the auth component of the target account and as // well as the input of the input note let public_key = PublicKeyCommitment::from(sec_key.public_key()); let public_key_value = Word::from(public_key); - let (rpo_component, authenticator) = - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }.build_component(); + let (rpo_component, authenticator) = Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + } + .build_component(); let mock_seed_1 = Word::from([1, 2, 3, 4u32]).as_bytes(); let target_account = AccountBuilder::new(mock_seed_1) @@ -518,7 +514,7 @@ async fn test_public_key_as_note_input() -> anyhow::Result<()> { .with_component(BasicWallet) .build_existing()?; - let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); + let serial_num = RandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); let tag = NoteTag::with_account_target(target_account.id()); let metadata = NoteMetadata::new(sender_account.id(), NoteType::Public).with_tag(tag); let vault = NoteAssets::new(vec![])?; diff --git a/crates/miden-testing/src/kernel_tests/tx/test_output_note.rs b/crates/miden-testing/src/kernel_tests/tx/test_output_note.rs index ca50745940..fbcc0cd51d 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_output_note.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_output_note.rs @@ -1,18 +1,15 @@ use alloc::string::String; -use alloc::vec::Vec; -use anyhow::Context; use miden_protocol::account::auth::AuthScheme; use 
miden_protocol::account::{Account, AccountId}; use miden_protocol::asset::{Asset, FungibleAsset, NonFungibleAsset}; -use miden_protocol::crypto::rand::RpoRandomCoin; +use miden_protocol::crypto::rand::RandomCoin; use miden_protocol::errors::tx_kernel::{ ERR_NON_FUNGIBLE_ASSET_ALREADY_EXISTS, ERR_TX_NUMBER_OF_OUTPUT_NOTES_EXCEEDS_LIMIT, }; use miden_protocol::note::{ Note, - NoteAssets, NoteAttachment, NoteAttachmentScheme, NoteMetadata, @@ -34,18 +31,27 @@ use miden_protocol::testing::account_id::{ }; use miden_protocol::testing::constants::NON_FUNGIBLE_ASSET_DATA_2; use miden_protocol::transaction::memory::{ + ASSET_SIZE, + ASSET_VALUE_OFFSET, NOTE_MEM_SIZE, NUM_OUTPUT_NOTES_PTR, OUTPUT_NOTE_ASSETS_OFFSET, OUTPUT_NOTE_ATTACHMENT_OFFSET, OUTPUT_NOTE_METADATA_HEADER_OFFSET, + OUTPUT_NOTE_NUM_ASSETS_OFFSET, OUTPUT_NOTE_RECIPIENT_OFFSET, OUTPUT_NOTE_SECTION_OFFSET, }; -use miden_protocol::transaction::{OutputNote, OutputNotes}; +use miden_protocol::transaction::{RawOutputNote, RawOutputNotes}; use miden_protocol::{Felt, Word, ZERO}; use miden_standards::code_builder::CodeBuilder; -use miden_standards::note::{NetworkAccountTarget, NoteExecutionHint, P2idNote}; +use miden_standards::note::{ + AccountTargetNetworkNote, + NetworkAccountTarget, + NetworkNoteExt, + NoteExecutionHint, + P2idNote, +}; use miden_standards::testing::mock_account::MockAccountExt; use miden_standards::testing::note::NoteBuilder; @@ -201,80 +207,45 @@ async fn test_create_note_too_many_notes() -> anyhow::Result<()> { #[tokio::test] async fn test_get_output_notes_commitment() -> anyhow::Result<()> { - let tx_context = { - let account = - Account::mock(ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, Auth::IncrNonce); - - let output_note_1 = - create_public_p2any_note(ACCOUNT_ID_SENDER.try_into()?, [FungibleAsset::mock(100)]); - - let input_note_1 = create_public_p2any_note( - ACCOUNT_ID_PRIVATE_SENDER.try_into()?, - [FungibleAsset::mock(100)], - ); - - let input_note_2 = create_public_p2any_note( 
- ACCOUNT_ID_PRIVATE_SENDER.try_into()?, - [FungibleAsset::mock(200)], - ); + let mut rng = RandomCoin::new(Word::from([1, 2, 3, 4u32])); + let account = Account::mock(ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, Auth::IncrNonce); - TransactionContextBuilder::new(account) - .extend_input_notes(vec![input_note_1, input_note_2]) - .extend_expected_output_notes(vec![OutputNote::Full(output_note_1)]) - .build()? - }; + let asset_1 = FungibleAsset::mock(100); + let asset_2 = FungibleAsset::mock(200); - // extract input note data - let input_note_1 = tx_context.tx_inputs().input_notes().get_note(0).note(); - let input_asset_1 = **input_note_1 - .assets() - .iter() - .take(1) - .collect::>() - .first() - .context("getting first expected input asset")?; - let input_note_2 = tx_context.tx_inputs().input_notes().get_note(1).note(); - let input_asset_2 = **input_note_2 - .assets() - .iter() - .take(1) - .collect::>() - .first() - .context("getting second expected input asset")?; - - // Choose random accounts as the target for the note tag. 
- let network_account = AccountId::try_from(ACCOUNT_ID_NETWORK_NON_FUNGIBLE_FAUCET)?; - let local_account = AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET)?; + let input_note_1 = create_public_p2any_note(ACCOUNT_ID_PRIVATE_SENDER.try_into()?, [asset_1]); + let input_note_2 = create_public_p2any_note(ACCOUNT_ID_PRIVATE_SENDER.try_into()?, [asset_2]); // create output note 1 - let output_serial_no_1 = Word::from([8u32; 4]); - let output_tag_1 = NoteTag::with_account_target(network_account); - let assets = NoteAssets::new(vec![input_asset_1])?; - let metadata = NoteMetadata::new(tx_context.tx_inputs().account().id(), NoteType::Public) - .with_tag(output_tag_1); - let inputs = NoteStorage::new(vec![])?; - let recipient = NoteRecipient::new(output_serial_no_1, input_note_1.script().clone(), inputs); - let output_note_1 = Note::new(assets, metadata, recipient); + let output_note_1 = NoteBuilder::new(account.id(), &mut rng) + .tag(NoteTag::with_account_target(account.id()).as_u32()) + .note_type(NoteType::Public) + .add_assets([asset_1]) + .build()?; // create output note 2 - let output_serial_no_2 = Word::from([11u32; 4]); - let output_tag_2 = NoteTag::with_account_target(local_account); - let assets = NoteAssets::new(vec![input_asset_2])?; - let attachment = NoteAttachment::new_array( - NoteAttachmentScheme::new(5), - [42, 43, 44, 45, 46u32].map(Felt::from).to_vec(), - )?; - let metadata = NoteMetadata::new(tx_context.tx_inputs().account().id(), NoteType::Public) - .with_tag(output_tag_2) - .with_attachment(attachment); - let inputs = NoteStorage::new(vec![])?; - let recipient = NoteRecipient::new(output_serial_no_2, input_note_2.script().clone(), inputs); - let output_note_2 = Note::new(assets, metadata, recipient); + let output_note_2 = NoteBuilder::new(account.id(), &mut rng) + .tag(NoteTag::with_custom_account_target(account.id(), 2)?.as_u32()) + .note_type(NoteType::Public) + .add_assets([asset_2]) + .attachment(NoteAttachment::new_array( + 
NoteAttachmentScheme::new(5), + [42, 43, 44, 45, 46u32].map(Felt::from).to_vec(), + )?) + .build()?; + + let tx_context = TransactionContextBuilder::new(account) + .extend_input_notes(vec![input_note_1.clone(), input_note_2.clone()]) + .extend_expected_output_notes(vec![ + RawOutputNote::Full(output_note_1.clone()), + RawOutputNote::Full(output_note_2.clone()), + ]) + .build()?; // compute expected output notes commitment - let expected_output_notes_commitment = OutputNotes::new(vec![ - OutputNote::Full(output_note_1.clone()), - OutputNote::Full(output_note_2.clone()), + let expected_output_notes_commitment = RawOutputNotes::new(vec![ + RawOutputNote::Full(output_note_1.clone()), + RawOutputNote::Full(output_note_2.clone()), ])? .commitment(); @@ -298,7 +269,8 @@ async fn test_get_output_notes_commitment() -> anyhow::Result<()> { exec.output_note::create # => [note_idx] - push.{asset_1} + push.{ASSET_1_VALUE} + push.{ASSET_1_KEY} exec.output_note::add_asset # => [] @@ -309,7 +281,9 @@ async fn test_get_output_notes_commitment() -> anyhow::Result<()> { exec.output_note::create # => [note_idx] - dup push.{asset_2} + dup + push.{ASSET_2_VALUE} + push.{ASSET_2_KEY} exec.output_note::add_asset # => [note_idx] @@ -332,14 +306,12 @@ async fn test_get_output_notes_commitment() -> anyhow::Result<()> { PUBLIC_NOTE = NoteType::Public as u8, recipient_1 = output_note_1.recipient().digest(), tag_1 = output_note_1.metadata().tag(), - asset_1 = Word::from( - **output_note_1.assets().iter().take(1).collect::>().first().unwrap() - ), + ASSET_1_KEY = asset_1.to_key_word(), + ASSET_1_VALUE = asset_1.to_value_word(), recipient_2 = output_note_2.recipient().digest(), tag_2 = output_note_2.metadata().tag(), - asset_2 = Word::from( - **output_note_2.assets().iter().take(1).collect::>().first().unwrap() - ), + ASSET_2_KEY = asset_2.to_key_word(), + ASSET_2_VALUE = asset_2.to_value_word(), ATTACHMENT2 = output_note_2.metadata().to_attachment_word(), attachment_scheme2 = 
output_note_2.metadata().attachment().attachment_scheme().as_u32(), ); @@ -378,7 +350,7 @@ async fn test_get_output_notes_commitment() -> anyhow::Result<()> { "Validate the output note 2 attachment", ); - assert_eq!(exec_output.get_stack_word_be(0), expected_output_notes_commitment); + assert_eq!(exec_output.get_stack_word(0), expected_output_notes_commitment); Ok(()) } @@ -389,7 +361,7 @@ async fn test_create_note_and_add_asset() -> anyhow::Result<()> { let faucet_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET)?; let recipient = Word::from([0, 1, 2, 3u32]); let tag = NoteTag::with_account_target(faucet_id); - let asset = Word::from(FungibleAsset::new(faucet_id, 10)?); + let asset = FungibleAsset::new(faucet_id, 10)?; let code = format!( " @@ -411,8 +383,9 @@ async fn test_create_note_and_add_asset() -> anyhow::Result<()> { dup assertz.err=\"index of the created note should be zero\" # => [note_idx] - push.{asset} - # => [ASSET, note_idx] + push.{ASSET_VALUE} + push.{ASSET_KEY} + # => [ASSET_KEY, ASSET_VALUE, note_idx] call.output_note::add_asset # => [] @@ -424,15 +397,21 @@ async fn test_create_note_and_add_asset() -> anyhow::Result<()> { recipient = recipient, PUBLIC_NOTE = NoteType::Public as u8, tag = tag, - asset = asset, + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; assert_eq!( exec_output.get_kernel_mem_word(OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET), - asset, - "asset must be stored at the correct memory location", + asset.to_key_word(), + "asset key must be stored at the correct memory location", + ); + assert_eq!( + exec_output.get_kernel_mem_word(OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET + 4), + asset.to_value_word(), + "asset value must be stored at the correct memory location", ); Ok(()) @@ -448,13 +427,12 @@ async fn test_create_note_and_add_multiple_assets() -> anyhow::Result<()> { let recipient = Word::from([0, 1, 2, 
3u32]); let tag = NoteTag::with_account_target(faucet_2); - let asset = Word::from(FungibleAsset::new(faucet, 10)?); - let asset_2 = Word::from(FungibleAsset::new(faucet_2, 20)?); - let asset_3 = Word::from(FungibleAsset::new(faucet_2, 30)?); - let asset_2_and_3 = Word::from(FungibleAsset::new(faucet_2, 50)?); + let asset = FungibleAsset::new(faucet, 10)?; + let asset_2 = FungibleAsset::new(faucet_2, 20)?; + let asset_3 = FungibleAsset::new(faucet_2, 30)?; + let asset_2_plus_3 = FungibleAsset::new(faucet_2, 50)?; let non_fungible_asset = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA_2); - let non_fungible_asset_encoded = Word::from(non_fungible_asset); let code = format!( " @@ -474,20 +452,27 @@ async fn test_create_note_and_add_multiple_assets() -> anyhow::Result<()> { dup assertz.err=\"index of the created note should be zero\" # => [note_idx] - dup push.{asset} - call.output_note::add_asset + dup + push.{ASSET_VALUE} + push.{ASSET_KEY} + exec.output_note::add_asset # => [note_idx] - dup push.{asset_2} - call.output_note::add_asset + dup + push.{ASSET2_VALUE} + push.{ASSET2_KEY} + exec.output_note::add_asset # => [note_idx] - dup push.{asset_3} - call.output_note::add_asset + dup + push.{ASSET3_VALUE} + push.{ASSET3_KEY} + exec.output_note::add_asset # => [note_idx] - push.{nft} - call.output_note::add_asset + push.{ASSET4_VALUE} + push.{ASSET4_KEY} + exec.output_note::add_asset # => [] # truncate the stack @@ -497,30 +482,73 @@ async fn test_create_note_and_add_multiple_assets() -> anyhow::Result<()> { recipient = recipient, PUBLIC_NOTE = NoteType::Public as u8, tag = tag, - asset = asset, - asset_2 = asset_2, - asset_3 = asset_3, - nft = non_fungible_asset_encoded, + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), + ASSET2_KEY = asset_2.to_key_word(), + ASSET2_VALUE = asset_2.to_value_word(), + ASSET3_KEY = asset_3.to_key_word(), + ASSET3_VALUE = asset_3.to_value_word(), + ASSET4_KEY = non_fungible_asset.to_key_word(), + ASSET4_VALUE = 
non_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; + assert_eq!( + exec_output + .get_kernel_mem_element(OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_NUM_ASSETS_OFFSET) + .as_canonical_u64(), + 3, + "unexpected number of assets in output note", + ); + assert_eq!( exec_output.get_kernel_mem_word(OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET), - asset, - "asset must be stored at the correct memory location", + asset.to_key_word(), + "asset key must be stored at the correct memory location", + ); + assert_eq!( + exec_output.get_kernel_mem_word( + OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET + ASSET_VALUE_OFFSET + ), + asset.to_value_word(), + "asset value must be stored at the correct memory location", ); assert_eq!( - exec_output.get_kernel_mem_word(OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET + 4), - asset_2_and_3, - "asset_2 and asset_3 must be stored at the same correct memory location", + exec_output.get_kernel_mem_word( + OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET + ASSET_SIZE + ), + asset_2_plus_3.to_key_word(), + "asset key must be stored at the correct memory location", + ); + assert_eq!( + exec_output.get_kernel_mem_word( + OUTPUT_NOTE_SECTION_OFFSET + + OUTPUT_NOTE_ASSETS_OFFSET + + ASSET_SIZE + + ASSET_VALUE_OFFSET + ), + asset_2_plus_3.to_value_word(), + "asset value must be stored at the correct memory location", ); assert_eq!( - exec_output.get_kernel_mem_word(OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET + 8), - non_fungible_asset_encoded, - "non_fungible_asset must be stored at the correct memory location", + exec_output.get_kernel_mem_word( + OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET + ASSET_SIZE * 2 + ), + non_fungible_asset.to_key_word(), + "asset key must be stored at the correct memory location", + ); + assert_eq!( + exec_output.get_kernel_mem_word( + OUTPUT_NOTE_SECTION_OFFSET + + OUTPUT_NOTE_ASSETS_OFFSET + + ASSET_SIZE * 2 + + 
ASSET_VALUE_OFFSET + ), + non_fungible_asset.to_value_word(), + "asset value must be stored at the correct memory location", ); Ok(()) @@ -533,7 +561,6 @@ async fn test_create_note_and_add_same_nft_twice() -> anyhow::Result<()> { let recipient = Word::from([0, 1, 2, 3u32]); let tag = NoteTag::new(999 << 16 | 777); let non_fungible_asset = NonFungibleAsset::mock(&[1, 2, 3]); - let encoded = Word::from(non_fungible_asset); let code = format!( " @@ -550,13 +577,16 @@ async fn test_create_note_and_add_same_nft_twice() -> anyhow::Result<()> { exec.output_note::create # => [note_idx] - dup push.{nft} - # => [NFT, note_idx, note_idx] + dup + push.{ASSET_VALUE} + push.{ASSET_KEY} + # => [ASSET_KEY, ASSET_VALUE, note_idx, note_idx] exec.output_note::add_asset # => [note_idx] - push.{nft} + push.{ASSET_VALUE} + push.{ASSET_KEY} exec.output_note::add_asset # => [] end @@ -564,7 +594,8 @@ async fn test_create_note_and_add_same_nft_twice() -> anyhow::Result<()> { recipient = recipient, PUBLIC_NOTE = NoteType::Public as u8, tag = tag, - nft = encoded, + ASSET_KEY = non_fungible_asset.to_key_word(), + ASSET_VALUE = non_fungible_asset.to_value_word(), ); let exec_output = tx_context.execute_code(&code).await; @@ -707,7 +738,9 @@ async fn test_get_asset_info() -> anyhow::Result<()> { ); let account = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, [fungible_asset_0, fungible_asset_1], )?; @@ -719,7 +752,7 @@ async fn test_get_asset_info() -> anyhow::Result<()> { vec![fungible_asset_0], NoteType::Public, NoteAttachment::default(), - &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), + &mut RandomCoin::new(Word::from([1, 2, 3, 4u32])), )?; let output_note_1 = P2idNote::create( @@ -728,7 +761,7 @@ async fn test_get_asset_info() -> anyhow::Result<()> { vec![fungible_asset_0, fungible_asset_1], NoteType::Public, NoteAttachment::default(), - &mut 
RpoRandomCoin::new(Word::from([4, 3, 2, 1u32])), + &mut RandomCoin::new(Word::from([4, 3, 2, 1u32])), )?; let tx_script_src = &format!( @@ -745,9 +778,10 @@ async fn test_get_asset_info() -> anyhow::Result<()> { # => [note_idx] # move the asset 0 to the note - push.{asset_0} + dup + push.{ASSET_0_VALUE} + push.{ASSET_0_KEY} call.::miden::standards::wallets::basic::move_asset_to_note - dropw # => [note_idx] # get the assets hash and assets number of the note having only asset_0 @@ -774,9 +808,10 @@ async fn test_get_asset_info() -> anyhow::Result<()> { # => [note_idx] # add asset_1 to the note - push.{asset_1} + dup + push.{ASSET_1_VALUE} + push.{ASSET_1_KEY} call.::miden::standards::wallets::basic::move_asset_to_note - dropw # => [note_idx] # get the assets hash and assets number of the note having asset_0 and asset_1 @@ -801,12 +836,14 @@ async fn test_get_asset_info() -> anyhow::Result<()> { RECIPIENT = output_note_1.recipient().digest(), note_type = NoteType::Public as u8, tag = output_note_1.metadata().tag(), - asset_0 = Word::from(fungible_asset_0), + ASSET_0_VALUE = fungible_asset_0.to_value_word(), + ASSET_0_KEY = fungible_asset_0.to_key_word(), // first data request COMPUTED_ASSETS_COMMITMENT_0 = output_note_0.assets().commitment(), assets_number_0 = output_note_0.assets().num_assets(), // second data request - asset_1 = Word::from(fungible_asset_1), + ASSET_1_VALUE = fungible_asset_1.to_value_word(), + ASSET_1_KEY = fungible_asset_1.to_key_word(), COMPUTED_ASSETS_COMMITMENT_1 = output_note_1.assets().commitment(), assets_number_1 = output_note_1.assets().num_assets(), ); @@ -815,7 +852,7 @@ async fn test_get_asset_info() -> anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(account.id(), &[], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note_1)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note_1)]) .tx_script(tx_script) .build()?; @@ -831,7 +868,9 @@ async fn test_get_recipient_and_metadata() -> anyhow::Result<()> { let mut builder = MockChain::builder(); let account = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, [FungibleAsset::mock(2000)], )?; @@ -843,7 +882,7 @@ async fn test_get_recipient_and_metadata() -> anyhow::Result<()> { vec![FungibleAsset::mock(5)], NoteType::Public, NoteAttachment::default(), - &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), + &mut RandomCoin::new(Word::from([1, 2, 3, 4u32])), )?; let tx_script_src = &format!( @@ -893,7 +932,7 @@ async fn test_get_recipient_and_metadata() -> anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(account.id(), &[], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) .tx_script(tx_script) .build()?; @@ -921,13 +960,13 @@ async fn test_get_assets() -> anyhow::Result<()> { push.{note_idx} push.{dest_ptr} # => [dest_ptr, note_index] - # write the assets to the memory + # write the assets to memory exec.output_note::get_assets # => [num_assets, dest_ptr, note_index] # assert the number of note assets push.{assets_number} - assert_eq.err="note {note_index} has incorrect assets number" + assert_eq.err="expected note {note_index} to have {assets_number} assets" # => [dest_ptr, note_index] "#, note_idx = note_index, @@ -940,19 +979,31 @@ async fn test_get_assets() -> anyhow::Result<()> { check_assets_code.push_str(&format!( r#" # load the asset stored in memory - padw dup.4 mem_loadw_be - # => [STORED_ASSET, dest_ptr, note_index] + padw dup.4 mem_loadw_le + # => [STORED_ASSET_KEY, dest_ptr, note_index] + + # assert the asset 
key matches + push.{NOTE_ASSET_KEY} + assert_eqw.err="expected asset key at asset index {asset_index} of the note\ + {note_index} to be {NOTE_ASSET_KEY}" + # => [dest_ptr, note_index] + + # load the asset stored in memory + padw dup.4 add.{ASSET_VALUE_OFFSET} mem_loadw_le + # => [STORED_ASSET_VALUE, dest_ptr, note_index] - # assert the asset - push.{NOTE_ASSET} - assert_eqw.err="asset {asset_index} of the note {note_index} is incorrect" + # assert the asset value matches + push.{NOTE_ASSET_VALUE} + assert_eqw.err="expected asset value at asset index {asset_index} of the note\ + {note_index} to be {NOTE_ASSET_VALUE}" # => [dest_ptr, note_index] # move the pointer - add.4 - # => [dest_ptr+4, note_index] + add.{ASSET_SIZE} + # => [dest_ptr+ASSET_SIZE, note_index] "#, - NOTE_ASSET = Word::from(*asset), + NOTE_ASSET_KEY = asset.to_key_word(), + NOTE_ASSET_VALUE = asset.to_value_word(), asset_index = asset_index, note_index = note_index, )); @@ -986,9 +1037,9 @@ async fn test_get_assets() -> anyhow::Result<()> { create_note_0 = create_output_note(&p2id_note_0_assets), check_note_0 = check_assets_code(0, 0, &p2id_note_0_assets), create_note_1 = create_output_note(&p2id_note_1_asset), - check_note_1 = check_assets_code(1, 4, &p2id_note_1_asset), + check_note_1 = check_assets_code(1, 8, &p2id_note_1_asset), create_note_2 = create_output_note(&p2id_note_2_assets), - check_note_2 = check_assets_code(2, 8, &p2id_note_2_assets), + check_note_2 = check_assets_code(2, 16, &p2id_note_2_assets), ); let tx_script = CodeBuilder::default().compile_tx_script(tx_script_src)?; @@ -996,9 +1047,9 @@ async fn test_get_assets() -> anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(account.id(), &[], &[])? 
.extend_expected_output_notes(vec![ - OutputNote::Full(p2id_note_0_assets), - OutputNote::Full(p2id_note_1_asset), - OutputNote::Full(p2id_note_2_assets), + RawOutputNote::Full(p2id_note_0_assets), + RawOutputNote::Full(p2id_note_1_asset), + RawOutputNote::Full(p2id_note_2_assets), ]) .tx_script(tx_script) .build()?; @@ -1011,10 +1062,10 @@ async fn test_get_assets() -> anyhow::Result<()> { #[tokio::test] async fn test_set_none_attachment() -> anyhow::Result<()> { let account = Account::mock(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, Auth::IncrNonce); - let rng = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])); + let rng = RandomCoin::new(Word::from([1, 2, 3, 4u32])); let attachment = NoteAttachment::default(); let output_note = - OutputNote::Full(NoteBuilder::new(account.id(), rng).attachment(attachment).build()?); + RawOutputNote::Full(NoteBuilder::new(account.id(), rng).attachment(attachment).build()?); let tx_script = format!( " @@ -1066,11 +1117,11 @@ async fn test_set_none_attachment() -> anyhow::Result<()> { #[tokio::test] async fn test_set_word_attachment() -> anyhow::Result<()> { let account = Account::mock(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, Auth::IncrNonce); - let rng = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])); + let rng = RandomCoin::new(Word::from([1, 2, 3, 4u32])); let attachment = NoteAttachment::new_word(NoteAttachmentScheme::new(u32::MAX), Word::from([3, 4, 5, 6u32])); let output_note = - OutputNote::Full(NoteBuilder::new(account.id(), rng).attachment(attachment).build()?); + RawOutputNote::Full(NoteBuilder::new(account.id(), rng).attachment(attachment).build()?); let tx_script = format!( " @@ -1120,11 +1171,11 @@ async fn test_set_word_attachment() -> anyhow::Result<()> { #[tokio::test] async fn test_set_array_attachment() -> anyhow::Result<()> { let account = Account::mock(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, Auth::IncrNonce); - let rng = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])); + let rng = RandomCoin::new(Word::from([1, 2, 3, 4u32])); let 
elements = [3, 4, 5, 6, 7, 8, 9u32].map(Felt::from).to_vec(); let attachment = NoteAttachment::new_array(NoteAttachmentScheme::new(42), elements.clone())?; let output_note = - OutputNote::Full(NoteBuilder::new(account.id(), rng).attachment(attachment).build()?); + RawOutputNote::Full(NoteBuilder::new(account.id(), rng).attachment(attachment).build()?); let tx_script = format!( " @@ -1176,7 +1227,7 @@ async fn test_set_array_attachment() -> anyhow::Result<()> { #[tokio::test] async fn test_set_network_target_account_attachment() -> anyhow::Result<()> { let account = Account::mock(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, Auth::IncrNonce); - let rng = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])); + let rng = RandomCoin::new(Word::from([1, 2, 3, 4u32])); let attachment = NetworkAccountTarget::new( ACCOUNT_ID_NETWORK_NON_FUNGIBLE_FAUCET.try_into()?, NoteExecutionHint::on_block_slot(5, 32, 3), @@ -1195,7 +1246,7 @@ async fn test_set_network_target_account_attachment() -> anyhow::Result<()> { let actual_note = tx.output_notes().get_note(0); assert_eq!(actual_note.header(), output_note.header()); - assert_eq!(actual_note.assets().unwrap(), output_note.assets()); + assert_eq!(actual_note.assets(), output_note.assets()); // Make sure we can deserialize the attachment back into its original type. 
let actual_attachment = NetworkAccountTarget::try_from(actual_note.metadata().attachment())?; @@ -1204,6 +1255,75 @@ async fn test_set_network_target_account_attachment() -> anyhow::Result<()> { Ok(()) } +#[tokio::test] +async fn test_network_note() -> anyhow::Result<()> { + let sender = Account::mock(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, Auth::IncrNonce); + let mut rng = RandomCoin::new(Word::from([9, 8, 7, 6u32])); + + // --- Valid network note --- + let target_id = AccountId::try_from(ACCOUNT_ID_NETWORK_NON_FUNGIBLE_FAUCET)?; + let attachment = NetworkAccountTarget::new(target_id, NoteExecutionHint::Always)?; + + let note = NoteBuilder::new(sender.id(), &mut rng) + .note_type(NoteType::Public) + .attachment(attachment) + .build()?; + + // is_network_note() returns true for a note with a valid NetworkAccountTarget attachment. + assert!(note.is_network_note()); + + // into_account_target_network_note() succeeds and accessors return correct values. + let expected_note_type = note.metadata().note_type(); + let network_note = note.into_account_target_network_note()?; + assert_eq!(network_note.target_account_id(), target_id); + assert_eq!(network_note.execution_hint(), NoteExecutionHint::Always); + assert_eq!(network_note.note_type(), expected_note_type); + + // TryFrom succeeds for a valid network note. + let valid_note = NoteBuilder::new(sender.id(), &mut rng) + .note_type(NoteType::Public) + .attachment(attachment) + .build()?; + let try_from_note = AccountTargetNetworkNote::try_from(valid_note)?; + assert_eq!(try_from_note.target_account_id(), target_id); + + // --- Invalid: note with default (empty) attachment --- + let non_network_note = + NoteBuilder::new(sender.id(), &mut rng).note_type(NoteType::Public).build()?; + + // is_network_note() returns false for a note without a NetworkAccountTarget attachment. + assert!(!non_network_note.is_network_note()); + + // AccountTargetNetworkNote::new() fails for an invalid attachment. 
+ assert!(AccountTargetNetworkNote::new(non_network_note.clone()).is_err()); + + // into_account_target_network_note() fails for a non-network note. + assert!(non_network_note.clone().into_account_target_network_note().is_err()); + + // TryFrom fails for a non-network note. + assert!(AccountTargetNetworkNote::try_from(non_network_note).is_err()); + + // --- Invalid: private note with valid NetworkAccountTarget attachment --- + let private_network_note = NoteBuilder::new(sender.id(), &mut rng) + .note_type(NoteType::Private) + .attachment(attachment) + .build()?; + + // is_network_note() returns false for a private note even with a valid attachment. + assert!(!private_network_note.is_network_note()); + + // AccountTargetNetworkNote::new() fails for a private note. + assert!(AccountTargetNetworkNote::new(private_network_note.clone()).is_err()); + + // into_account_target_network_note() fails for a private note. + assert!(private_network_note.clone().into_account_target_network_note().is_err()); + + // TryFrom fails for a private note. 
+ assert!(AccountTargetNetworkNote::try_from(private_network_note).is_err()); + + Ok(()) +} + // HELPER FUNCTIONS // ================================================================================================ @@ -1229,12 +1349,15 @@ fn create_output_note(note: &Note) -> String { create_note_code.push_str(&format!( " # move the asset to the note - push.{asset} + dup + push.{ASSET_VALUE} + push.{ASSET_KEY} + # => [ASSET_KEY, ASSET_VALUE, note_idx, note_idx] call.::miden::standards::wallets::basic::move_asset_to_note - dropw # => [note_idx] ", - asset = Word::from(*asset) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word() )); } diff --git a/crates/miden-testing/src/kernel_tests/tx/test_prologue.rs b/crates/miden-testing/src/kernel_tests/tx/test_prologue.rs index eb8f6cc3b3..3cb661b3a2 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_prologue.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_prologue.rs @@ -2,8 +2,8 @@ use alloc::collections::BTreeMap; use alloc::vec::Vec; use anyhow::Context; -use miden_processor::fast::ExecutionOutput; -use miden_processor::{AdviceInputs, Word}; +use miden_processor::advice::AdviceInputs; +use miden_processor::{ExecutionOutput, Word}; use miden_protocol::account::{ Account, AccountBuilder, @@ -14,14 +14,18 @@ use miden_protocol::account::{ StorageSlot, StorageSlotName, }; -use miden_protocol::asset::FungibleAsset; +use miden_protocol::asset::{FungibleAsset, NonFungibleAsset}; +use miden_protocol::block::account_tree::AccountIdKey; use miden_protocol::errors::tx_kernel::ERR_ACCOUNT_SEED_AND_COMMITMENT_DIGEST_MISMATCH; +use miden_protocol::note::NoteId; use miden_protocol::testing::account_id::{ ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, ACCOUNT_ID_SENDER, }; use miden_protocol::transaction::memory::{ ACCT_DB_ROOT_PTR, + ASSET_SIZE, + ASSET_VALUE_OFFSET, BLOCK_COMMITMENT_PTR, BLOCK_METADATA_PTR, BLOCK_NUMBER_IDX, @@ -72,12 +76,7 @@ use miden_protocol::transaction::memory::{ 
VALIDATOR_KEY_COMMITMENT_PTR, VERIFICATION_BASE_FEE_IDX, }; -use miden_protocol::transaction::{ - ExecutedTransaction, - TransactionAdviceInputs, - TransactionArgs, - TransactionKernel, -}; +use miden_protocol::transaction::{ExecutedTransaction, TransactionArgs, TransactionKernel}; use miden_protocol::{EMPTY_WORD, WORD_SIZE}; use miden_standards::account::wallets::BasicWallet; use miden_standards::code_builder::CodeBuilder; @@ -109,7 +108,7 @@ async fn test_transaction_prologue() -> anyhow::Result<()> { ); let input_note_2 = create_public_p2any_note( ACCOUNT_ID_SENDER.try_into().unwrap(), - [FungibleAsset::mock(100)], + [FungibleAsset::mock(100), NonFungibleAsset::mock(&[1, 2, 3])], ); let input_note_3 = create_public_p2any_note( ACCOUNT_ID_SENDER.try_into().unwrap(), @@ -136,16 +135,15 @@ async fn test_transaction_prologue() -> anyhow::Result<()> { let tx_script = CodeBuilder::default().compile_tx_script(mock_tx_script_code).unwrap(); - let note_args = [Word::from([91u32; 4]), Word::from([92u32; 4])]; - + // Input note 2 does not have any note args. 
let note_args_map = BTreeMap::from([ - (tx_context.input_notes().get_note(0).note().id(), note_args[0]), - (tx_context.input_notes().get_note(1).note().id(), note_args[1]), + (tx_context.input_notes().get_note(0).note().id(), Word::from([91u32; 4])), + (tx_context.input_notes().get_note(1).note().id(), Word::from([92u32; 4])), ]); let tx_args = TransactionArgs::new(tx_context.tx_args().advice_inputs().clone().map) .with_tx_script(tx_script) - .with_note_args(note_args_map); + .with_note_args(note_args_map.clone()); tx_context.set_tx_args(tx_args); let exec_output = &tx_context.execute_code(code).await?; @@ -155,7 +153,7 @@ async fn test_transaction_prologue() -> anyhow::Result<()> { partial_blockchain_memory_assertions(exec_output, &tx_context); kernel_data_memory_assertions(exec_output); account_data_memory_assertions(exec_output, &tx_context); - input_notes_memory_assertions(exec_output, &tx_context, ¬e_args); + input_notes_memory_assertions(exec_output, &tx_context, ¬e_args_map); Ok(()) } @@ -266,19 +264,19 @@ fn block_data_memory_assertions(exec_output: &ExecutionOutput, inputs: &Transact assert_eq!( exec_output.get_kernel_mem_word(BLOCK_METADATA_PTR)[BLOCK_NUMBER_IDX], - inputs.tx_inputs().block_header().block_num().into(), + Felt::from(inputs.tx_inputs().block_header().block_num()), "The block number should be stored at BLOCK_METADATA_PTR[BLOCK_NUMBER_IDX]" ); assert_eq!( exec_output.get_kernel_mem_word(BLOCK_METADATA_PTR)[PROTOCOL_VERSION_IDX], - inputs.tx_inputs().block_header().version().into(), + Felt::from(inputs.tx_inputs().block_header().version()), "The protocol version should be stored at BLOCK_METADATA_PTR[PROTOCOL_VERSION_IDX]" ); assert_eq!( exec_output.get_kernel_mem_word(BLOCK_METADATA_PTR)[TIMESTAMP_IDX], - inputs.tx_inputs().block_header().timestamp().into(), + Felt::from(inputs.tx_inputs().block_header().timestamp()), "The timestamp should be stored at BLOCK_METADATA_PTR[TIMESTAMP_IDX]" ); @@ -302,12 +300,7 @@ fn 
block_data_memory_assertions(exec_output: &ExecutionOutput, inputs: &Transact assert_eq!( exec_output.get_kernel_mem_word(FEE_PARAMETERS_PTR)[VERIFICATION_BASE_FEE_IDX], - inputs - .tx_inputs() - .block_header() - .fee_parameters() - .verification_base_fee() - .into(), + Felt::from(inputs.tx_inputs().block_header().fee_parameters().verification_base_fee()), "The verification base fee should be stored at FEE_PARAMETERS_PTR[VERIFICATION_BASE_FEE_IDX]" ); @@ -350,7 +343,7 @@ fn kernel_data_memory_assertions(exec_output: &ExecutionOutput) { // check that the number of kernel procedures stored in the memory is equal to the number of // procedures in the `TransactionKernel::PROCEDURES` array assert_eq!( - exec_output.get_kernel_mem_word(NUM_KERNEL_PROCEDURES_PTR)[0].as_int(), + exec_output.get_kernel_mem_word(NUM_KERNEL_PROCEDURES_PTR)[0].as_canonical_u64(), TransactionKernel::PROCEDURES.len() as u64, "Number of the kernel procedures should be stored at the NUM_KERNEL_PROCEDURES_PTR" ); @@ -439,7 +432,7 @@ fn account_data_memory_assertions(exec_output: &ExecutionOutput, inputs: &Transa fn input_notes_memory_assertions( exec_output: &ExecutionOutput, inputs: &TransactionContext, - note_args: &[Word], + note_args: &BTreeMap, ) { assert_eq!( exec_output.get_kernel_mem_word(INPUT_NOTE_SECTION_PTR), @@ -508,7 +501,7 @@ fn input_notes_memory_assertions( assert_eq!( exec_output.get_note_mem_word(note_idx, INPUT_NOTE_ARGS_OFFSET), - note_args[note_idx as usize], + note_args.get(&input_note.id()).copied().unwrap_or_default(), "note args should be stored at the correct offset" ); @@ -519,14 +512,22 @@ fn input_notes_memory_assertions( ); for (asset, asset_idx) in note.assets().iter().cloned().zip(0_u32..) 
{ - let word: Word = asset.into(); + let asset_key = asset.to_key_word(); + let asset_value = asset.to_value_word(); + + let asset_key_addr = INPUT_NOTE_ASSETS_OFFSET + asset_idx * ASSET_SIZE; + let asset_value_addr = asset_key_addr + ASSET_VALUE_OFFSET; + + assert_eq!( + exec_output.get_note_mem_word(note_idx, asset_key_addr), + asset_key, + "asset key should be stored at the correct offset" + ); + assert_eq!( - exec_output.get_note_mem_word( - note_idx, - INPUT_NOTE_ASSETS_OFFSET + asset_idx * WORD_SIZE as u32 - ), - word, - "assets should be stored at (INPUT_NOTES_DATA_OFFSET + note_index * 2048 + 32 + asset_idx * 4)" + exec_output.get_note_mem_word(note_idx, asset_value_addr), + asset_value, + "asset value should be stored at the correct offset" ); } } @@ -632,7 +633,7 @@ pub async fn create_account_invalid_seed() -> anyhow::Result<()> { .expect("failed to get transaction inputs from mock chain"); // override the seed with an invalid seed to ensure the kernel fails - let account_seed_key = TransactionAdviceInputs::account_id_map_key(account.id()); + let account_seed_key = AccountIdKey::from(account.id()).as_word(); let adv_inputs = AdviceInputs::default().with_map([(account_seed_key, vec![ZERO; WORD_SIZE])]); let tx_context = TransactionContextBuilder::new(account) @@ -675,7 +676,7 @@ async fn test_get_blk_version() -> anyhow::Result<()> { assert_eq!( exec_output.get_stack_element(0), - tx_context.tx_inputs().block_header().version().into() + Felt::from(tx_context.tx_inputs().block_header().version()) ); Ok(()) @@ -701,7 +702,7 @@ async fn test_get_blk_timestamp() -> anyhow::Result<()> { assert_eq!( exec_output.get_stack_element(0), - tx_context.tx_inputs().block_header().timestamp().into() + Felt::from(tx_context.tx_inputs().block_header().timestamp()) ); Ok(()) diff --git a/crates/miden-testing/src/kernel_tests/tx/test_tx.rs b/crates/miden-testing/src/kernel_tests/tx/test_tx.rs index 55dc329d65..d0e3ab8ffd 100644 --- 
a/crates/miden-testing/src/kernel_tests/tx/test_tx.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_tx.rs @@ -2,7 +2,7 @@ use alloc::sync::Arc; use anyhow::Context; use assert_matches::assert_matches; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::crypto::random::RandomCoin; use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::component::AccountComponentMetadata; use miden_protocol::account::{ @@ -46,8 +46,8 @@ use miden_protocol::testing::constants::{FUNGIBLE_ASSET_AMOUNT, NON_FUNGIBLE_ASS use miden_protocol::testing::note::DEFAULT_NOTE_CODE; use miden_protocol::transaction::{ InputNotes, - OutputNote, - OutputNotes, + RawOutputNote, + RawOutputNotes, TransactionArgs, TransactionKernel, TransactionSummary, @@ -75,11 +75,15 @@ async fn consuming_note_created_in_future_block_fails() -> anyhow::Result<()> { let mut builder = MockChain::builder(); let asset = FungibleAsset::mock(400); let account1 = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, [asset], )?; let account2 = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, [asset], )?; let output_note = create_public_p2any_note(account1.id(), [asset]); @@ -92,7 +96,7 @@ async fn consuming_note_created_in_future_block_fails() -> anyhow::Result<()> { // against reference block 1 which we'll use for the later transaction. let tx = mock_chain .build_tx_context(account1.id(), &[spawn_note.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .build()? 
.execute() .await?; @@ -162,19 +166,19 @@ async fn test_block_procedures() -> anyhow::Result<()> { let exec_output = &tx_context.execute_code(code).await?; assert_eq!( - exec_output.get_stack_word_be(0), + exec_output.get_stack_word(0), tx_context.tx_inputs().block_header().commitment(), "top word on the stack should be equal to the block header commitment" ); assert_eq!( - exec_output.get_stack_element(4).as_int(), + exec_output.get_stack_element(4).as_canonical_u64(), tx_context.tx_inputs().block_header().timestamp() as u64, "fifth element on the stack should be equal to the timestamp of the last block creation" ); assert_eq!( - exec_output.get_stack_element(5).as_int(), + exec_output.get_stack_element(5).as_canonical_u64(), tx_context.tx_inputs().block_header().block_num().as_u64(), "sixth element on the stack should be equal to the block number" ); @@ -227,6 +231,8 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { // In this test we create 3 notes. Note 1 is private, Note 2 is public and Note 3 is public // without assets. + let recipient_1 = Word::from([0, 1, 2, 3u32]); + // Create the expected output note for Note 2 which is public let serial_num_2 = Word::from([1, 2, 3, 4u32]); let note_script_2 = CodeBuilder::default().compile_note_script(DEFAULT_NOTE_CODE)?; @@ -253,24 +259,7 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { "\ use miden::standards::wallets::basic->wallet use miden::protocol::output_note - - #! Wrapper around move_asset_to_note for use with exec. - #! - #! Inputs: [ASSET, note_idx] - #! 
Outputs: [note_idx] - proc move_asset_to_note - # pad the stack before call - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [ASSET, note_idx, pad(11)] - - call.wallet::move_asset_to_note - dropw - # => [note_idx, pad(11)] - - # remove excess PADs from the stack - repeat.11 swap drop end - # => [note_idx] - end + use mock::util ## TRANSACTION SCRIPT ## ======================================================================================== @@ -278,21 +267,23 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { ## Send some assets from the account vault ## ------------------------------------------------------------------------------------ # partially deplete fungible asset balance - push.0.1.2.3 # recipient + push.{recipient_1} # recipient push.{NOTETYPE1} # note_type push.{tag1} # tag exec.output_note::create # => [note_idx = 0] - push.{REMOVED_ASSET_1} # asset_1 - # => [ASSET, note_idx] + dup + push.{REMOVED_ASSET_VALUE_1} + push.{REMOVED_ASSET_KEY_1} + # => [ASSET_KEY, ASSET_VALUE, note_idx, note_idx] - exec.move_asset_to_note + exec.util::move_asset_to_note # => [note_idx] - push.{REMOVED_ASSET_2} # asset_2 - exec.move_asset_to_note - drop + push.{REMOVED_ASSET_VALUE_2} + push.{REMOVED_ASSET_KEY_2} + exec.util::move_asset_to_note # => [] # send non-fungible asset @@ -302,12 +293,16 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { exec.output_note::create # => [note_idx = 1] - push.{REMOVED_ASSET_3} # asset_3 - exec.move_asset_to_note + dup + push.{REMOVED_ASSET_VALUE_3} + push.{REMOVED_ASSET_KEY_3} + exec.util::move_asset_to_note # => [note_idx] - push.{REMOVED_ASSET_4} # asset_4 - exec.move_asset_to_note + dup + push.{REMOVED_ASSET_VALUE_4} + push.{REMOVED_ASSET_KEY_4} + exec.util::move_asset_to_note # => [note_idx] push.{ATTACHMENT2} @@ -330,10 +325,14 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { # => [] end ", - REMOVED_ASSET_1 = Word::from(removed_asset_1), - REMOVED_ASSET_2 = 
Word::from(removed_asset_2), - REMOVED_ASSET_3 = Word::from(removed_asset_3), - REMOVED_ASSET_4 = Word::from(removed_asset_4), + REMOVED_ASSET_KEY_1 = removed_asset_1.to_key_word(), + REMOVED_ASSET_VALUE_1 = removed_asset_1.to_value_word(), + REMOVED_ASSET_KEY_2 = removed_asset_2.to_key_word(), + REMOVED_ASSET_VALUE_2 = removed_asset_2.to_value_word(), + REMOVED_ASSET_KEY_3 = removed_asset_3.to_key_word(), + REMOVED_ASSET_VALUE_3 = removed_asset_3.to_value_word(), + REMOVED_ASSET_KEY_4 = removed_asset_4.to_key_word(), + REMOVED_ASSET_VALUE_4 = removed_asset_4.to_value_word(), RECIPIENT2 = expected_output_note_2.recipient().digest(), RECIPIENT3 = expected_output_note_3.recipient().digest(), NOTETYPE1 = note_type1 as u8, @@ -345,7 +344,7 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { ATTACHMENT3 = attachment3.content().to_word(), ); - let tx_script = CodeBuilder::default().compile_tx_script(tx_script_src)?; + let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(tx_script_src)?; // expected delta // -------------------------------------------------------------------------------------------- @@ -359,8 +358,8 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { .tx_script(tx_script) .extend_advice_map(vec![(attachment3.content().to_word(), array.as_slice().to_vec())]) .extend_expected_output_notes(vec![ - OutputNote::Full(expected_output_note_2.clone()), - OutputNote::Full(expected_output_note_3.clone()), + RawOutputNote::Full(expected_output_note_2.clone()), + RawOutputNote::Full(expected_output_note_3.clone()), ]) .build()?; @@ -376,9 +375,8 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { // assert that the expected output note 1 is present let resulting_output_note_1 = executed_transaction.output_notes().get_note(0); - let expected_recipient_1 = Word::from([0, 1, 2, 3u32]); let expected_note_assets_1 = NoteAssets::new(vec![combined_asset])?; - let expected_note_id_1 = 
NoteId::new(expected_recipient_1, expected_note_assets_1.commitment()); + let expected_note_id_1 = NoteId::new(recipient_1, expected_note_assets_1.commitment()); assert_eq!(resulting_output_note_1.id(), expected_note_id_1); // assert that the expected output note 2 is present @@ -395,7 +393,7 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { let resulting_output_note_3 = executed_transaction.output_notes().get_note(2); assert_eq!(expected_output_note_3.id(), resulting_output_note_3.id()); - assert_eq!(expected_output_note_3.assets(), resulting_output_note_3.assets().unwrap()); + assert_eq!(expected_output_note_3.assets(), resulting_output_note_3.assets()); // make sure that the number of note storage items remains the same let resulting_note_2_recipient = @@ -425,19 +423,23 @@ async fn user_code_can_abort_transaction_with_summary() -> anyhow::Result<()> { const AUTH_UNAUTHORIZED_EVENT=event("miden::protocol::auth::unauthorized") #! Inputs: [AUTH_ARGS, pad(12)] #! Outputs: [pad(16)] + @auth_script pub proc auth_abort_tx dropw # => [pad(16)] - push.0.0 exec.tx::get_block_number exec.::miden::protocol::native_account::incr_nonce - # => [[final_nonce, block_num, 0, 0], pad(16)] + exec.tx::get_block_number + push.0.0 + # => [[0, 0, block_num, final_nonce], pad(16)] # => [SALT, pad(16)] exec.auth::create_tx_summary - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] - exec.auth::adv_insert_hqword + # insert tx summary into advice provider for extraction by the host + adv.insert_hqword + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] exec.auth::hash_tx_summary # => [MESSAGE, pad(16)] @@ -464,7 +466,7 @@ async fn user_code_can_abort_transaction_with_summary() -> anyhow::Result<()> { .context("failed to build account")?; // Consume and create a note so the input and outputs notes 
commitment is not the empty word. - let mut rng = RpoRandomCoin::new(Word::empty()); + let mut rng = RandomCoin::new(Word::empty()); let output_note = P2idNote::create( account.id(), account.id(), @@ -476,21 +478,21 @@ async fn user_code_can_abort_transaction_with_summary() -> anyhow::Result<()> { let input_note = create_spawn_note(vec![&output_note])?; let mut builder = MockChain::builder(); - builder.add_output_note(OutputNote::Full(input_note.clone())); + builder.add_output_note(RawOutputNote::Full(input_note.clone())); let mock_chain = builder.build()?; let tx_context = mock_chain.build_tx_context(account, &[input_note.id()], &[])?.build()?; let ref_block_num = tx_context.tx_inputs().block_header().block_num().as_u32(); - let final_nonce = tx_context.account().nonce().as_int() as u32 + 1; + let final_nonce = tx_context.account().nonce().as_canonical_u64() as u32 + 1; let input_notes = tx_context.input_notes().clone(); - let output_notes = OutputNotes::new(vec![OutputNote::Partial(output_note.into())])?; + let output_notes = RawOutputNotes::new(vec![RawOutputNote::Partial(output_note.into())])?; let error = tx_context.execute().await.unwrap_err(); assert_matches!(error, TransactionExecutorError::Unauthorized(tx_summary) => { assert!(tx_summary.account_delta().vault().is_empty()); assert!(tx_summary.account_delta().storage().is_empty()); - assert_eq!(tx_summary.account_delta().nonce_delta().as_int(), 1); + assert_eq!(tx_summary.account_delta().nonce_delta().as_canonical_u64(), 1); assert_eq!(tx_summary.input_notes(), &input_notes); assert_eq!(tx_summary.output_notes(), &output_notes); assert_eq!(tx_summary.salt(), Word::from( @@ -506,9 +508,10 @@ async fn user_code_can_abort_transaction_with_summary() -> anyhow::Result<()> { #[tokio::test] async fn tx_summary_commitment_is_signed_by_falcon_auth() -> anyhow::Result<()> { let mut builder = MockChain::builder(); - let account = builder - .add_existing_mock_account(Auth::BasicAuth { auth_scheme: 
AuthScheme::Falcon512Rpo })?; - let mut rng = RpoRandomCoin::new(Word::empty()); + let account = builder.add_existing_mock_account(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let mut rng = RandomCoin::new(Word::empty()); let p2id_note = P2idNote::create( account.id(), account.id(), @@ -534,7 +537,7 @@ async fn tx_summary_commitment_is_signed_by_falcon_auth() -> anyhow::Result<()> 0, 0, tx.block_header().block_num().as_u32(), - tx.final_account().nonce().as_int() as u32, + tx.final_account().nonce().as_canonical_u64() as u32, ]), ); let summary_commitment = summary.to_commitment(); @@ -544,9 +547,9 @@ async fn tx_summary_commitment_is_signed_by_falcon_auth() -> anyhow::Result<()> AuthMethod::SingleSig { approver: (pub_key, _) } => pub_key, AuthMethod::NoAuth => panic!("Expected SingleSig auth scheme, got NoAuth"), AuthMethod::Multisig { .. } => { - panic!("Expected SingleSig auth scheme, got Falcon512RpoMultisig") + panic!("Expected SingleSig auth scheme, got Multisig") }, - AuthMethod::Unknown => panic!("Expected Falcon512Rpo auth scheme, got Unknown"), + AuthMethod::Unknown => panic!("Expected SingleSig auth scheme, got Unknown"), }; // This is in an internal detail of the tx executor host, but this is the easiest way to check @@ -567,7 +570,7 @@ async fn tx_summary_commitment_is_signed_by_ecdsa_auth() -> anyhow::Result<()> { let mut builder = MockChain::builder(); let account = builder .add_existing_mock_account(Auth::BasicAuth { auth_scheme: AuthScheme::EcdsaK256Keccak })?; - let mut rng = RpoRandomCoin::new(Word::empty()); + let mut rng = RandomCoin::new(Word::empty()); let p2id_note = P2idNote::create( account.id(), account.id(), @@ -593,7 +596,7 @@ async fn tx_summary_commitment_is_signed_by_ecdsa_auth() -> anyhow::Result<()> { 0, 0, tx.block_header().block_num().as_u32(), - tx.final_account().nonce().as_int() as u32, + tx.final_account().nonce().as_canonical_u64() as u32, ]), ); let summary_commitment = summary.to_commitment(); 
@@ -709,25 +712,28 @@ async fn test_tx_script_inputs() -> anyhow::Result<()> { #[tokio::test] async fn test_tx_script_args() -> anyhow::Result<()> { let tx_script_args = Word::from([1, 2, 3, 4u32]); + let advice_entry = Word::from([5, 6, 7, 8u32]); - let tx_script_src = r#" + let tx_script_src = format!( + r#" begin # => [TX_SCRIPT_ARGS] # `TX_SCRIPT_ARGS` value is a user provided word, which could be used during the # transaction execution. In this example it is a `[1, 2, 3, 4]` word. # assert the correctness of the argument - dupw push.1.2.3.4 assert_eqw.err="provided transaction arguments don't match the expected ones" + dupw push.{tx_script_args} assert_eqw.err="provided transaction arguments don't match the expected ones" # => [TX_SCRIPT_ARGS] # since we provided an advice map entry with the transaction script arguments as a key, # we can obtain the value of this entry - adv.push_mapval adv_push.4 + adv.push_mapval padw adv_loadw # => [[map_entry_values], TX_SCRIPT_ARGS] # assert the correctness of the map entry values - push.5.6.7.8 assert_eqw.err="obtained advice map value doesn't match the expected one" - end"#; + push.{advice_entry} assert_eqw.err="obtained advice map value doesn't match the expected one" + end"# + ); let tx_script = CodeBuilder::default() .compile_tx_script(tx_script_src) @@ -737,10 +743,7 @@ async fn test_tx_script_args() -> anyhow::Result<()> { // argument let tx_context = TransactionContextBuilder::with_existing_mock_account() .tx_script(tx_script) - .extend_advice_map([( - tx_script_args, - vec![Felt::new(5), Felt::new(6), Felt::new(7), Felt::new(8)], - )]) + .extend_advice_map([(tx_script_args, advice_entry.as_elements().to_vec())]) .tx_script_args(tx_script_args) .build()?; @@ -825,8 +828,9 @@ async fn inputs_created_correctly() -> anyhow::Result<()> { async fn tx_can_be_reexecuted() -> anyhow::Result<()> { let mut builder = MockChain::builder(); // Use basic auth so the tx requires a signature for successful execution. 
- let account = builder - .add_existing_mock_account(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let account = builder.add_existing_mock_account(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let note = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into()?, account.id(), diff --git a/crates/miden-testing/src/mock_chain/auth.rs b/crates/miden-testing/src/mock_chain/auth.rs index a389046d18..5b7f06b06a 100644 --- a/crates/miden-testing/src/mock_chain/auth.rs +++ b/crates/miden-testing/src/mock_chain/auth.rs @@ -9,9 +9,12 @@ use miden_protocol::testing::noop_auth_component::NoopAuthComponent; use miden_standards::account::auth::{ AuthMultisig, AuthMultisigConfig, + AuthMultisigPsm, + AuthMultisigPsmConfig, AuthSingleSig, AuthSingleSigAcl, AuthSingleSigAclConfig, + PsmConfig, }; use miden_standards::testing::account_component::{ ConditionalAuthComponent, @@ -31,7 +34,15 @@ pub enum Auth { /// Multisig Multisig { threshold: u32, - approvers: Vec<(Word, AuthScheme)>, + approvers: Vec<(PublicKeyCommitment, AuthScheme)>, + proc_threshold_map: Vec<(Word, u32)>, + }, + + /// Multisig with a private state manager. 
+ MultisigPsm { + threshold: u32, + approvers: Vec<(PublicKeyCommitment, AuthScheme)>, + psm_config: PsmConfig, proc_threshold_map: Vec<(Word, u32)>, }, @@ -77,14 +88,7 @@ impl Auth { (component, Some(authenticator)) }, Auth::Multisig { threshold, approvers, proc_threshold_map } => { - let approvers = approvers - .iter() - .map(|(pub_key, auth_scheme)| { - (PublicKeyCommitment::from(*pub_key), *auth_scheme) - }) - .collect(); - - let config = AuthMultisigConfig::new(approvers, *threshold) + let config = AuthMultisigConfig::new(approvers.clone(), *threshold) .and_then(|cfg| cfg.with_proc_thresholds(proc_threshold_map.clone())) .expect("invalid multisig config"); let component = @@ -92,6 +96,21 @@ impl Auth { (component, None) }, + Auth::MultisigPsm { + threshold, + approvers, + psm_config, + proc_threshold_map, + } => { + let config = AuthMultisigPsmConfig::new(approvers.clone(), *threshold, *psm_config) + .and_then(|cfg| cfg.with_proc_thresholds(proc_threshold_map.clone())) + .expect("invalid multisig psm config"); + let component = AuthMultisigPsm::new(config) + .expect("multisig psm component creation failed") + .into(); + + (component, None) + }, Auth::Acl { auth_trigger_procedures, allow_unauthorized_output_notes, diff --git a/crates/miden-testing/src/mock_chain/chain.rs b/crates/miden-testing/src/mock_chain/chain.rs index 522c0f2689..29f6f41090 100644 --- a/crates/miden-testing/src/mock_chain/chain.rs +++ b/crates/miden-testing/src/mock_chain/chain.rs @@ -3,7 +3,7 @@ use alloc::vec::Vec; use anyhow::Context; use miden_block_prover::LocalBlockProver; -use miden_processor::DeserializationError; +use miden_processor::serde::DeserializationError; use miden_protocol::MIN_PROOF_SECURITY_LEVEL; use miden_protocol::account::auth::{AuthSecretKey, PublicKey}; use miden_protocol::account::delta::AccountUpdateDetails; @@ -32,9 +32,8 @@ use miden_protocol::transaction::{ }; use miden_tx::LocalTransactionProver; use miden_tx::auth::BasicAuthenticator; -use 
miden_tx::utils::{ByteReader, Deserializable, Serializable}; +use miden_tx::utils::serde::{ByteReader, ByteWriter, Deserializable, Serializable}; use miden_tx_batch_prover::LocalBatchProver; -use winterfell::ByteWriter; use super::note::MockChainNote; use crate::{MockChainBuilder, TransactionContextBuilder}; @@ -78,9 +77,10 @@ use crate::{MockChainBuilder, TransactionContextBuilder}; /// let mut builder = MockChain::builder(); /// /// // Add a recipient wallet with basic authentication. -/// // Use either ECDSA K256 Keccak (scheme_id: 1) or Falcon512Rpo (scheme_id: 2) auth scheme. -/// let receiver = -/// builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; +/// // Use either ECDSA K256 Keccak (scheme_id: 1) or Falcon512Poseidon2 (scheme_id: 2) auth scheme. +/// let receiver = builder.add_existing_wallet(Auth::BasicAuth { +/// auth_scheme: AuthScheme::Falcon512Poseidon2, +/// })?; /// /// // Add a wallet with assets. /// let sender = builder.add_existing_wallet(Auth::IncrNonce)?; @@ -120,7 +120,7 @@ use crate::{MockChainBuilder, TransactionContextBuilder}; /// .committed_account(receiver.id())? 
/// .vault() /// .get_balance(fungible_asset.faucet_id())?, -/// fungible_asset.amount() +/// fungible_asset.amount().inner() /// ); /// # Ok(()) /// # } @@ -143,16 +143,20 @@ use crate::{MockChainBuilder, TransactionContextBuilder}; /// let mut builder = MockChain::builder(); /// /// let faucet = builder.create_new_faucet( -/// Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, +/// Auth::BasicAuth { +/// auth_scheme: AuthScheme::Falcon512Poseidon2, +/// }, /// "USDT", /// 100_000, /// )?; /// let asset = Asset::from(FungibleAsset::new(faucet.id(), 10)?); /// -/// let sender = -/// builder.create_new_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; -/// let target = -/// builder.create_new_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; +/// let sender = builder.create_new_wallet(Auth::BasicAuth { +/// auth_scheme: AuthScheme::Falcon512Poseidon2, +/// })?; +/// let target = builder.create_new_wallet(Auth::BasicAuth { +/// auth_scheme: AuthScheme::Falcon512Poseidon2, +/// })?; /// /// let note = builder.add_p2id_note(faucet.id(), target.id(), &[asset], NoteType::Public)?; /// @@ -183,6 +187,9 @@ pub struct MockChain { /// block. pending_transactions: Vec, + /// Batches that have been submitted to the chain but have not yet been included in a block. + pending_batches: Vec, + /// NoteID |-> MockChainNote mapping to simplify note retrieval. 
committed_notes: BTreeMap, @@ -232,6 +239,7 @@ impl MockChain { account_tree: AccountTree, account_authenticators: BTreeMap, secret_key: SecretKey, + genesis_notes: Vec, ) -> anyhow::Result { let mut chain = MockChain { chain: Blockchain::default(), @@ -239,6 +247,7 @@ impl MockChain { nullifier_tree: NullifierTree::default(), account_tree, pending_transactions: Vec::new(), + pending_batches: Vec::new(), committed_notes: BTreeMap::new(), committed_accounts: BTreeMap::new(), account_authenticators, @@ -251,6 +260,20 @@ impl MockChain { .apply_block(genesis_block) .context("failed to build account from builder")?; + // Update committed_notes with full note details for genesis notes. + // This is needed because apply_block only stores headers for private notes, + // but tests need full note details to create input notes. + for note in genesis_notes { + if let Some(MockChainNote::Private(_, _, inclusion_proof)) = + chain.committed_notes.get(¬e.id()) + { + chain.committed_notes.insert( + note.id(), + MockChainNote::Public(note.clone(), inclusion_proof.clone()), + ); + } + } + debug_assert_eq!(chain.blocks.len(), 1); debug_assert_eq!(chain.committed_accounts.len(), chain.account_tree.num_accounts()); @@ -851,6 +874,14 @@ impl MockChain { self.pending_transactions.push(transaction); } + /// Adds the given [`ProvenBatch`] to the list of pending batches. + /// + /// A block has to be created to apply the batch effects to the chain state, e.g. using + /// [`MockChain::prove_next_block`]. 
+ pub fn add_pending_batch(&mut self, batch: ProvenBatch) { + self.pending_batches.push(batch); + } + // PRIVATE HELPERS // ---------------------------------------------------------------------------------------- @@ -913,9 +944,11 @@ impl MockChain { ) .context("failed to create inclusion proof for output note")?; - if let OutputNote::Full(note) = created_note { - self.committed_notes - .insert(note.id(), MockChainNote::Public(note.clone(), note_inclusion_proof)); + if let OutputNote::Public(public_note) = created_note { + self.committed_notes.insert( + public_note.id(), + MockChainNote::Public(public_note.as_note().clone(), note_inclusion_proof), + ); } else { self.committed_notes.insert( created_note.id(), @@ -975,7 +1008,8 @@ impl MockChain { // Create batches from pending transactions. // ---------------------------------------------------------------------------------------- - let batches = self.pending_transactions_to_batches()?; + let mut batches = self.pending_transactions_to_batches()?; + batches.extend(core::mem::take(&mut self.pending_batches)); // Create block. 
// ---------------------------------------------------------------------------------------- @@ -1052,6 +1086,7 @@ impl Deserializable for MockChain { nullifier_tree, account_tree, pending_transactions, + pending_batches: Vec::new(), committed_notes, committed_accounts, account_authenticators, @@ -1207,7 +1242,7 @@ mod tests { )?; let account_id = account.id(); - assert_eq!(account.nonce().as_int(), 0); + assert_eq!(account.nonce().as_canonical_u64(), 0); let note_1 = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), @@ -1228,7 +1263,7 @@ mod tests { mock_chain.add_pending_executed_transaction(&tx)?; mock_chain.prove_next_block()?; - assert!(tx.final_account().nonce().as_int() > 0); + assert!(tx.final_account().nonce().as_canonical_u64() > 0); assert_eq!( tx.final_account().to_commitment(), mock_chain.account_tree.open(account_id).state_commitment() @@ -1245,7 +1280,9 @@ mod tests { for i in 0..10 { let account = builder .add_account_from_builder( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, AccountBuilder::new([i; 32]).with_component(BasicWallet), AccountState::New, ) @@ -1326,4 +1363,26 @@ mod tests { Ok(()) } + + #[tokio::test] + async fn add_pending_batch() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + let account = builder.add_existing_mock_account(Auth::IncrNonce)?; + let mut chain = builder.build()?; + + // Execute a noop transaction and create a batch from it. + let tx = chain.build_tx_context(account.id(), &[], &[])?.build()?.execute().await?; + let proven_tx = LocalTransactionProver::default().prove_dummy(tx)?; + let proposed_batch = chain.propose_transaction_batch(vec![proven_tx])?; + let proven_batch = chain.prove_transaction_batch(proposed_batch)?; + + // Submit the batch directly and prove the block. 
+ let num_blocks_before = chain.proven_blocks().len(); + chain.add_pending_batch(proven_batch); + chain.prove_next_block()?; + + assert_eq!(chain.proven_blocks().len(), num_blocks_before + 1); + + Ok(()) + } } diff --git a/crates/miden-testing/src/mock_chain/chain_builder.rs b/crates/miden-testing/src/mock_chain/chain_builder.rs index 7da3e77c0f..31ec1c6b8e 100644 --- a/crates/miden-testing/src/mock_chain/chain_builder.rs +++ b/crates/miden-testing/src/mock_chain/chain_builder.rs @@ -13,11 +13,12 @@ const DEFAULT_FAUCET_DECIMALS: u8 = 10; // ================================================================================================ use itertools::Itertools; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::crypto::random::RandomCoin; use miden_protocol::account::delta::AccountUpdateDetails; use miden_protocol::account::{ Account, AccountBuilder, + AccountComponent, AccountDelta, AccountId, AccountStorageMode, @@ -44,9 +45,15 @@ use miden_protocol::errors::NoteError; use miden_protocol::note::{Note, NoteAttachment, NoteDetails, NoteType}; use miden_protocol::testing::account_id::ACCOUNT_ID_NATIVE_ASSET_FAUCET; use miden_protocol::testing::random_secret_key::random_secret_key; -use miden_protocol::transaction::{OrderedTransactionHeaders, OutputNote, TransactionKernel}; +use miden_protocol::transaction::{OrderedTransactionHeaders, RawOutputNote, TransactionKernel}; use miden_protocol::{Felt, MAX_OUTPUT_NOTES_PER_BATCH, Word}; +use miden_standards::account::access::Ownable2Step; use miden_standards::account::faucets::{BasicFungibleFaucet, NetworkFungibleFaucet}; +use miden_standards::account::mint_policies::{ + AuthControlled, + OwnerControlled, + OwnerControlledInitConfig, +}; use miden_standards::account::wallets::BasicWallet; use miden_standards::note::{P2idNote, P2ideNote, P2ideNoteStorage, SwapNote}; use miden_standards::testing::account_component::MockAccountComponent; @@ -103,8 +110,8 @@ use crate::{AccountState, Auth, MockChain}; pub 
struct MockChainBuilder { accounts: BTreeMap, account_authenticators: BTreeMap, - notes: Vec, - rng: RpoRandomCoin, + notes: Vec, + rng: RandomCoin, // Fee parameters. native_asset_id: AccountId, verification_base_fee: u32, @@ -128,7 +135,7 @@ impl MockChainBuilder { accounts: BTreeMap::new(), account_authenticators: BTreeMap::new(), notes: Vec::new(), - rng: RpoRandomCoin::new(Default::default()), + rng: RandomCoin::new(Default::default()), native_asset_id, verification_base_fee: 0, } @@ -196,7 +203,22 @@ impl MockChainBuilder { ) .context("failed to create genesis account tree")?; - let note_chunks = self.notes.into_iter().chunks(MAX_OUTPUT_NOTES_PER_BATCH); + // Extract full notes before shrinking for later use in MockChain + let full_notes: Vec = self + .notes + .iter() + .filter_map(|note| match note { + RawOutputNote::Full(n) => Some(n.clone()), + _ => None, + }) + .collect(); + + let proven_notes: Vec<_> = self + .notes + .into_iter() + .map(|note| note.to_output_note().expect("genesis note should be valid")) + .collect(); + let note_chunks = proven_notes.into_iter().chunks(MAX_OUTPUT_NOTES_PER_BATCH); let output_note_batches: Vec = note_chunks .into_iter() .map(|batch_notes| batch_notes.into_iter().enumerate().collect::>()) @@ -254,6 +276,7 @@ impl MockChainBuilder { account_tree, self.account_authenticators, validator_secret_key, + full_notes, ) } @@ -307,9 +330,7 @@ impl MockChainBuilder { ) -> anyhow::Result { let token_symbol = TokenSymbol::new(token_symbol) .with_context(|| format!("invalid token symbol: {token_symbol}"))?; - let max_supply_felt = max_supply.try_into().map_err(|_| { - anyhow::anyhow!("max supply value cannot be converted to Felt: {max_supply}") - })?; + let max_supply_felt = Felt::try_from(max_supply)?; let basic_faucet = BasicFungibleFaucet::new(token_symbol, DEFAULT_FAUCET_DECIMALS, max_supply_felt) .context("failed to create BasicFungibleFaucet")?; @@ -317,7 +338,8 @@ impl MockChainBuilder { let account_builder = 
AccountBuilder::new(self.rng.random()) .storage_mode(AccountStorageMode::Public) .account_type(AccountType::FungibleFaucet) - .with_component(basic_faucet); + .with_component(basic_faucet) + .with_component(AuthControlled::allow_all()); self.add_account_from_builder(auth_method, account_builder, AccountState::New) } @@ -333,10 +355,8 @@ impl MockChainBuilder { max_supply: u64, token_supply: Option, ) -> anyhow::Result { - let max_supply = Felt::try_from(max_supply) - .map_err(|err| anyhow::anyhow!("failed to convert max_supply to felt: {err}"))?; - let token_supply = Felt::try_from(token_supply.unwrap_or(0)) - .map_err(|err| anyhow::anyhow!("failed to convert token_supply to felt: {err}"))?; + let max_supply = Felt::try_from(max_supply)?; + let token_supply = Felt::try_from(token_supply.unwrap_or(0))?; let token_symbol = TokenSymbol::new(token_symbol).context("failed to create token symbol")?; @@ -348,6 +368,7 @@ impl MockChainBuilder { let account_builder = AccountBuilder::new(self.rng.random()) .storage_mode(AccountStorageMode::Public) .with_component(basic_faucet) + .with_component(AuthControlled::allow_all()) .account_type(AccountType::FungibleFaucet); self.add_account_from_builder(auth_method, account_builder, AccountState::Exists) @@ -362,26 +383,23 @@ impl MockChainBuilder { max_supply: u64, owner_account_id: AccountId, token_supply: Option, + mint_policy: OwnerControlledInitConfig, ) -> anyhow::Result { - let max_supply = Felt::try_from(max_supply) - .map_err(|err| anyhow::anyhow!("failed to convert max_supply to felt: {err}"))?; - let token_supply = Felt::try_from(token_supply.unwrap_or(0)) - .map_err(|err| anyhow::anyhow!("failed to convert token_supply to felt: {err}"))?; + let max_supply = Felt::try_from(max_supply)?; + let token_supply = Felt::try_from(token_supply.unwrap_or(0))?; let token_symbol = TokenSymbol::new(token_symbol).context("failed to create token symbol")?; - let network_faucet = NetworkFungibleFaucet::new( - token_symbol, - 
DEFAULT_FAUCET_DECIMALS, - max_supply, - owner_account_id, - ) - .and_then(|fungible_faucet| fungible_faucet.with_token_supply(token_supply)) - .context("failed to create network fungible faucet")?; + let network_faucet = + NetworkFungibleFaucet::new(token_symbol, DEFAULT_FAUCET_DECIMALS, max_supply) + .and_then(|fungible_faucet| fungible_faucet.with_token_supply(token_supply)) + .context("failed to create network fungible faucet")?; let account_builder = AccountBuilder::new(self.rng.random()) .storage_mode(AccountStorageMode::Network) .with_component(network_faucet) + .with_component(Ownable2Step::new(owner_account_id)) + .with_component(OwnerControlled::new(mint_policy)) .account_type(AccountType::FungibleFaucet); // Network faucets always use IncrNonce auth (no authentication) @@ -476,6 +494,20 @@ impl MockChainBuilder { Ok(account) } + pub fn add_existing_account_from_components( + &mut self, + auth: Auth, + components: impl IntoIterator, + ) -> anyhow::Result { + let mut account_builder = + Account::builder(rand::rng().random()).storage_mode(AccountStorageMode::Public); + + for component in components { + account_builder = account_builder.with_component(component); + } + + self.add_account_from_builder(auth, account_builder, AccountState::Exists) + } /// Adds the provided account to the list of genesis accounts. /// @@ -497,7 +529,7 @@ impl MockChainBuilder { // ---------------------------------------------------------------------------------------- /// Adds the provided note to the initial chain state. 
- pub fn add_output_note(&mut self, note: impl Into) { + pub fn add_output_note(&mut self, note: impl Into) { self.notes.push(note.into()); } @@ -512,7 +544,7 @@ impl MockChainBuilder { assets: impl IntoIterator, ) -> anyhow::Result { let note = create_p2any_note(sender_account_id, note_type, assets, &mut self.rng); - self.add_output_note(OutputNote::Full(note.clone())); + self.add_output_note(RawOutputNote::Full(note.clone())); Ok(note) } @@ -537,12 +569,12 @@ impl MockChainBuilder { NoteAttachment::default(), &mut self.rng, )?; - self.add_output_note(OutputNote::Full(note.clone())); + self.add_output_note(RawOutputNote::Full(note.clone())); Ok(note) } - /// Adds a P2IDE [`OutputNote`] (pay‑to‑ID‑extended) to the list of genesis notes. + /// Adds a P2IDE note (pay‑to‑ID‑extended) to the list of genesis notes. /// /// A P2IDE note can include an optional `timelock_height` and/or an optional /// `reclaim_height` after which the `sender_account_id` may reclaim the @@ -567,12 +599,12 @@ impl MockChainBuilder { &mut self.rng, )?; - self.add_output_note(OutputNote::Full(note.clone())); + self.add_output_note(RawOutputNote::Full(note.clone())); Ok(note) } - /// Adds a public SWAP [`OutputNote`] to the list of genesis notes. + /// Adds a public SWAP note to the list of genesis notes. pub fn add_swap_note( &mut self, sender: AccountId, @@ -591,7 +623,7 @@ impl MockChainBuilder { &mut self.rng, )?; - self.add_output_note(OutputNote::Full(swap_note.clone())); + self.add_output_note(RawOutputNote::Full(swap_note.clone())); Ok((swap_note, payback_note)) } @@ -614,7 +646,7 @@ impl MockChainBuilder { I: ExactSizeIterator, { let note = create_spawn_note(output_notes)?; - self.add_output_note(OutputNote::Full(note.clone())); + self.add_output_note(RawOutputNote::Full(note.clone())); Ok(note) } @@ -647,7 +679,7 @@ impl MockChainBuilder { /// Returns a mutable reference to the builder's RNG. /// /// This can be used when creating accounts or notes and randomness is required. 
- pub fn rng_mut(&mut self) -> &mut RpoRandomCoin { + pub fn rng_mut(&mut self) -> &mut RandomCoin { &mut self.rng } diff --git a/crates/miden-testing/src/mock_chain/note.rs b/crates/miden-testing/src/mock_chain/note.rs index 759ef257ea..233c751f15 100644 --- a/crates/miden-testing/src/mock_chain/note.rs +++ b/crates/miden-testing/src/mock_chain/note.rs @@ -1,8 +1,7 @@ -use miden_processor::DeserializationError; +use miden_processor::serde::DeserializationError; use miden_protocol::note::{Note, NoteId, NoteInclusionProof, NoteMetadata}; use miden_protocol::transaction::InputNote; -use miden_tx::utils::{ByteReader, Deserializable, Serializable}; -use winterfell::ByteWriter; +use miden_tx::utils::serde::{ByteReader, ByteWriter, Deserializable, Serializable}; // MOCK CHAIN NOTE // ================================================================================================ diff --git a/crates/miden-testing/src/mock_host.rs b/crates/miden-testing/src/mock_host.rs index 993e730c07..7bfecaf079 100644 --- a/crates/miden-testing/src/mock_host.rs +++ b/crates/miden-testing/src/mock_host.rs @@ -2,17 +2,12 @@ use alloc::collections::BTreeSet; use alloc::sync::Arc; use alloc::vec::Vec; -use miden_processor::{ - AdviceMutation, - AsyncHost, - BaseHost, - EventError, - FutureMaybeSend, - MastForest, - ProcessState, -}; +use miden_processor::advice::AdviceMutation; +use miden_processor::event::EventError; +use miden_processor::mast::MastForest; +use miden_processor::{FutureMaybeSend, Host, ProcessorState}; use miden_protocol::transaction::TransactionEventId; -use miden_protocol::vm::EventId; +use miden_protocol::vm::{EventId, EventName}; use miden_protocol::{CoreLibrary, Word}; use miden_tx::TransactionExecutorHost; use miden_tx::auth::UnreachableAuth; @@ -89,7 +84,7 @@ impl<'store> MockHost<'store> { } } -impl<'store> BaseHost for MockHost<'store> { +impl<'store> Host for MockHost<'store> { fn get_label_and_source_file( &self, location: 
&miden_protocol::assembly::debuginfo::Location, @@ -99,16 +94,14 @@ impl<'store> BaseHost for MockHost<'store> { ) { self.exec_host.get_label_and_source_file(location) } -} -impl<'store> AsyncHost for MockHost<'store> { fn get_mast_forest(&self, node_digest: &Word) -> impl FutureMaybeSend>> { self.exec_host.get_mast_forest(node_digest) } fn on_event( &mut self, - process: &ProcessState, + process: &ProcessorState, ) -> impl FutureMaybeSend, EventError>> { let event_id = EventId::from_felt(process.get_stack_item(0)); @@ -121,4 +114,8 @@ impl<'store> AsyncHost for MockHost<'store> { } } } + + fn resolve_event(&self, event_id: EventId) -> Option<&EventName> { + self.exec_host.resolve_event(event_id) + } } diff --git a/crates/miden-testing/src/standards/network_account_target.rs b/crates/miden-testing/src/standards/network_account_target.rs index 3e750d44b3..0b7d3d47c2 100644 --- a/crates/miden-testing/src/standards/network_account_target.rs +++ b/crates/miden-testing/src/standards/network_account_target.rs @@ -40,7 +40,7 @@ async fn network_account_target_get_id() -> anyhow::Result<()> { assert.err=ERR_NOT_NETWORK_ACCOUNT_TARGET # => [NOTE_ATTACHMENT] exec.network_account_target::get_id - # => [account_id_prefix, account_id_suffix] + # => [account_id_suffix, account_id_prefix] # cleanup stack movup.2 drop movup.2 drop end @@ -51,8 +51,8 @@ async fn network_account_target_get_id() -> anyhow::Result<()> { let exec_output = CodeExecutor::with_default_host().run(&source).await?; - assert_eq!(exec_output.stack[0], target_id.prefix().as_felt()); - assert_eq!(exec_output.stack[1], target_id.suffix()); + assert_eq!(exec_output.stack[0], target_id.suffix()); + assert_eq!(exec_output.stack[1], target_id.prefix().as_felt()); Ok(()) } @@ -74,9 +74,9 @@ async fn network_account_target_new_attachment() -> anyhow::Result<()> { begin push.{exec_hint} - push.{target_id_suffix} push.{target_id_prefix} - # => [target_id_prefix, target_id_suffix, exec_hint] + push.{target_id_suffix} + # 
=> [target_id_suffix, target_id_prefix, exec_hint] exec.network_account_target::new # => [attachment_scheme, attachment_kind, ATTACHMENT, pad(16)] @@ -97,7 +97,8 @@ async fn network_account_target_new_attachment() -> anyhow::Result<()> { Felt::from(NetworkAccountTarget::ATTACHMENT_SCHEME.as_u32()) ); - assert_eq!(exec_output.stack.get_stack_word_be(2).unwrap(), attachment_word); + let word = exec_output.stack.get_word(2).unwrap(); + assert_eq!(word, attachment_word); Ok(()) } @@ -117,9 +118,9 @@ async fn network_account_target_attachment_round_trip() -> anyhow::Result<()> { begin push.{exec_hint} - push.{target_id_suffix} push.{target_id_prefix} - # => [target_id_prefix, target_id_suffix, exec_hint] + push.{target_id_suffix} + # => [target_id_suffix, target_id_prefix, exec_hint] exec.network_account_target::new # => [attachment_scheme, attachment_kind, ATTACHMENT] exec.network_account_target::is_network_account_target @@ -127,7 +128,7 @@ async fn network_account_target_attachment_round_trip() -> anyhow::Result<()> { assert.err=ERR_NOT_NETWORK_ACCOUNT_TARGET # => [ATTACHMENT] exec.network_account_target::get_id - # => [target_id_prefix, target_id_suffix] + # => [target_id_suffix, target_id_prefix] # cleanup stack movup.2 drop movup.2 drop end @@ -139,8 +140,8 @@ async fn network_account_target_attachment_round_trip() -> anyhow::Result<()> { let exec_output = CodeExecutor::with_default_host().run(&source).await?; - assert_eq!(exec_output.stack[0], target_id.prefix().as_felt()); - assert_eq!(exec_output.stack[1], target_id.suffix()); + assert_eq!(exec_output.stack[0], target_id.suffix()); + assert_eq!(exec_output.stack[1], target_id.prefix().as_felt()); Ok(()) } diff --git a/crates/miden-testing/src/standards/note_tag.rs b/crates/miden-testing/src/standards/note_tag.rs index 0f476cb37c..27af5b8c23 100644 --- a/crates/miden-testing/src/standards/note_tag.rs +++ b/crates/miden-testing/src/standards/note_tag.rs @@ -34,7 +34,7 @@ async fn 
test_note_tag_account_target(#[case] tag_len: u8) -> anyhow::Result<()> ); let exec_output = CodeExecutor::with_default_host().run(&code).await?; - let actual_tag = exec_output.stack[0].as_int(); + let actual_tag = exec_output.stack[0].as_canonical_u64(); assert_eq!( actual_tag, diff --git a/crates/miden-testing/src/tx_context/builder.rs b/crates/miden-testing/src/tx_context/builder.rs index a81f3c95f8..b61a5a5e98 100644 --- a/crates/miden-testing/src/tx_context/builder.rs +++ b/crates/miden-testing/src/tx_context/builder.rs @@ -6,7 +6,8 @@ use alloc::sync::Arc; use alloc::vec::Vec; use anyhow::Context; -use miden_processor::{AdviceInputs, Felt, Word}; +use miden_processor::advice::AdviceInputs; +use miden_processor::{Felt, Word}; use miden_protocol::EMPTY_WORD; use miden_protocol::account::auth::{PublicKeyCommitment, Signature}; use miden_protocol::account::{Account, AccountHeader, AccountId}; @@ -17,7 +18,7 @@ use miden_protocol::note::{Note, NoteId, NoteScript}; use miden_protocol::testing::account_id::ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE; use miden_protocol::testing::noop_auth_component::NoopAuthComponent; use miden_protocol::transaction::{ - OutputNote, + RawOutputNote, TransactionArgs, TransactionInputs, TransactionScript, @@ -43,7 +44,7 @@ use crate::{MockChain, MockChainNote}; /// ``` /// # use anyhow::Result; /// # use miden_testing::TransactionContextBuilder; -/// # use miden_protocol::{account::AccountBuilder,Felt, FieldElement}; +/// # use miden_protocol::{account::AccountBuilder, Felt}; /// # use miden_protocol::transaction::TransactionKernel; /// # /// # #[tokio::main(flavor = "current_thread")] @@ -62,7 +63,7 @@ use crate::{MockChain, MockChainNote}; /// "; /// /// let exec_output = tx_context.execute_code(code).await?; -/// assert_eq!(exec_output.stack.get(0).unwrap(), &Felt::new(5)); +/// assert_eq!(exec_output.stack.get(0).unwrap(), &Felt::from(5u32)); /// # Ok(()) /// # } /// ``` @@ -235,11 +236,10 @@ impl TransactionContextBuilder { 
} /// Extend the expected output notes. - pub fn extend_expected_output_notes(mut self, output_notes: Vec) -> Self { + pub fn extend_expected_output_notes(mut self, output_notes: Vec) -> Self { let output_notes = output_notes.into_iter().filter_map(|n| match n { - OutputNote::Full(note) => Some(note), - OutputNote::Partial(_) => None, - OutputNote::Header(_) => None, + RawOutputNote::Full(note) => Some(note), + RawOutputNote::Partial(_) => None, }); self.expected_output_notes.extend(output_notes); @@ -285,7 +285,7 @@ impl TransactionContextBuilder { let mut builder = MockChain::builder(); for i in self.input_notes { - builder.add_output_note(OutputNote::Full(i)); + builder.add_output_note(RawOutputNote::Full(i)); } let mut mock_chain = builder.build()?; diff --git a/crates/miden-testing/src/tx_context/context.rs b/crates/miden-testing/src/tx_context/context.rs index 1ff68e6280..c2ee158a6b 100644 --- a/crates/miden-testing/src/tx_context/context.rs +++ b/crates/miden-testing/src/tx_context/context.rs @@ -3,12 +3,13 @@ use alloc::collections::{BTreeMap, BTreeSet}; use alloc::sync::Arc; use alloc::vec::Vec; -use miden_processor::fast::ExecutionOutput; -use miden_processor::{FutureMaybeSend, MastForest, MastForestStore, Word}; +use miden_processor::mast::MastForest; +use miden_processor::{ExecutionOutput, FutureMaybeSend, MastForestStore, Word}; use miden_protocol::account::{ Account, AccountId, PartialAccount, + StorageMapKey, StorageMapWitness, StorageSlotContent, }; @@ -89,7 +90,7 @@ impl TransactionContext { .iter() .flat_map(|note| note.note().assets().iter().map(Asset::vault_key)) .collect::>(); - let fee_asset_vault_key = AssetVaultKey::from_account_id( + let fee_asset_vault_key = AssetVaultKey::new_fungible( self.tx_inputs().block_header().fee_parameters().native_asset_id(), ) .expect("fee asset should be a fungible asset"); @@ -106,7 +107,7 @@ impl TransactionContext { // Add the vault key for the fee asset to the list of asset vault keys which may need to be 
// accessed at the end of the transaction. let fee_asset_vault_key = - AssetVaultKey::from_account_id(block_header.fee_parameters().native_asset_id()) + AssetVaultKey::new_fungible(block_header.fee_parameters().native_asset_id()) .expect("fee asset should be a fungible asset"); asset_vault_keys.insert(fee_asset_vault_key); @@ -327,7 +328,7 @@ impl DataStore for TransactionContext { &self, account_id: AccountId, map_root: Word, - map_key: Word, + map_key: StorageMapKey, ) -> impl FutureMaybeSend> { async move { if account_id == self.account().id() { diff --git a/crates/miden-testing/src/utils.rs b/crates/miden-testing/src/utils.rs index 1732ff3580..b16bc5a880 100644 --- a/crates/miden-testing/src/utils.rs +++ b/crates/miden-testing/src/utils.rs @@ -1,13 +1,15 @@ use alloc::string::String; use alloc::vec::Vec; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::crypto::random::RandomCoin; +use miden_protocol::Word; use miden_protocol::account::AccountId; use miden_protocol::asset::Asset; use miden_protocol::crypto::rand::FeltRng; -use miden_protocol::note::{Note, NoteType}; -use miden_protocol::testing::storage::prepare_assets; +use miden_protocol::errors::NoteError; +use miden_protocol::note::{Note, NoteAssets, NoteMetadata, NoteTag, NoteType}; use miden_standards::code_builder::CodeBuilder; +use miden_standards::note::P2idNoteStorage; use miden_standards::testing::note::NoteBuilder; use rand::SeedableRng; use rand::rngs::SmallRng; @@ -19,7 +21,7 @@ use rand::rngs::SmallRng; macro_rules! 
assert_execution_error { ($execution_result:expr, $expected_err:expr) => { match $execution_result { - Err($crate::ExecError(miden_processor::ExecutionError::FailedAssertion { label: _, source_file: _, clk: _, err_code, err_msg, err: _ })) => { + Err($crate::ExecError(miden_processor::ExecutionError::OperationError { label: _, source_file: _, err: miden_processor::operation::OperationError::FailedAssertion { err_code, err_msg } })) => { if let Some(ref msg) = err_msg { assert_eq!(msg.as_ref(), $expected_err.message(), "error messages did not match"); } @@ -41,13 +43,13 @@ macro_rules! assert_transaction_executor_error { ($execution_result:expr, $expected_err:expr) => { match $execution_result { Err(miden_tx::TransactionExecutorError::TransactionProgramExecutionFailed( - miden_processor::ExecutionError::FailedAssertion { + miden_processor::ExecutionError::OperationError { label: _, source_file: _, - clk: _, - err_code, - err_msg, - err: _, + err: miden_processor::operation::OperationError::FailedAssertion { + err_code, + err_msg, + }, }, )) => { if let Some(ref msg) = err_msg { @@ -78,7 +80,7 @@ pub fn create_public_p2any_note( sender: AccountId, assets: impl IntoIterator, ) -> Note { - let mut rng = RpoRandomCoin::new(Default::default()); + let mut rng = RandomCoin::new(Default::default()); create_p2any_note(sender, NoteType::Public, assets, &mut rng) } @@ -92,37 +94,36 @@ pub fn create_p2any_note( sender: AccountId, note_type: NoteType, assets: impl IntoIterator, - rng: &mut RpoRandomCoin, + rng: &mut RandomCoin, ) -> Note { let serial_number = rng.draw_word(); let assets: Vec<_> = assets.into_iter().collect(); let mut code_body = String::new(); - for i in 0..assets.len() { - if i == 0 { - // first asset (dest_ptr is already on stack) - code_body.push_str( - " - # add first asset - - padw dup.4 mem_loadw_be - padw swapw padw padw swapdw - call.wallet::receive_asset - dropw movup.12 - # => [dest_ptr, pad(12)] - ", - ); - } else { - code_body.push_str( - " - # add 
next asset + for asset_idx in 0..assets.len() { + code_body.push_str(&format!( + " + # => [dest_ptr] + + # current_asset_ptr = dest_ptr + ASSET_SIZE * asset_idx + dup push.ASSET_SIZE mul.{asset_idx} + # => [current_asset_ptr, dest_ptr] + + padw dup.4 add.ASSET_VALUE_MEMORY_OFFSET mem_loadw_le + # => [ASSET_VALUE, current_asset_ptr, dest_ptr] + + padw movup.8 mem_loadw_le + # => [ASSET_KEY, ASSET_VALUE, current_asset_ptr, dest_ptr] + + padw padw swapdw + # => [ASSET_KEY, ASSET_VALUE, pad(12), dest_ptr] - add.4 dup movdn.13 - padw movup.4 mem_loadw_be call.wallet::receive_asset - dropw movup.12 - # => [dest_ptr, pad(12)]", - ); - } + # => [pad(16), dest_ptr] + + dropw dropw dropw dropw + # => [dest_ptr] + ", + )); } code_body.push_str("dropw dropw dropw dropw"); @@ -130,6 +131,8 @@ pub fn create_p2any_note( r#" use mock::account use miden::protocol::active_note + use ::miden::protocol::asset::ASSET_VALUE_MEMORY_OFFSET + use ::miden::protocol::asset::ASSET_SIZE use miden::standards::wallets::basic->wallet begin @@ -209,10 +212,10 @@ fn note_script_that_creates_notes<'note>( // Make sure that the transaction's native account matches the note sender. 
out.push_str(&format!( r#"exec.::miden::protocol::native_account::get_id - # => [native_account_id_prefix, native_account_id_suffix] - push.{sender_prefix} assert_eq.err="sender ID prefix does not match native account ID's prefix" - # => [native_account_id_suffix] + # => [native_account_id_suffix, native_account_id_prefix] push.{sender_suffix} assert_eq.err="sender ID suffix does not match native account ID's suffix" + # => [native_account_id_prefix] + push.{sender_prefix} assert_eq.err="sender ID prefix does not match native account ID's prefix" # => [] "#, sender_prefix = sender_id.prefix().as_felt(), @@ -250,11 +253,17 @@ fn note_script_that_creates_notes<'note>( attachment_kind = note.metadata().attachment().content().attachment_kind().as_u8(), )); - let assets_str = prepare_assets(note.assets()); - for asset in assets_str { + for asset in note.assets().iter() { out.push_str(&format!( - " push.{asset} - call.::miden::standards::wallets::basic::move_asset_to_note\n", + " dup + push.{ASSET_VALUE} + push.{ASSET_KEY} + # => [ASSET_KEY, ASSET_VALUE, note_idx, note_idx] + call.::miden::standards::wallets::basic::move_asset_to_note + # => [note_idx] + ", + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), )); } } @@ -263,3 +272,21 @@ fn note_script_that_creates_notes<'note>( Ok(out) } + +/// Generates a P2ID note - Pay-to-ID note with an exact serial number +pub fn create_p2id_note_exact( + sender: AccountId, + target: AccountId, + assets: Vec, + note_type: NoteType, + serial_num: Word, +) -> Result { + let recipient = P2idNoteStorage::new(target).into_recipient(serial_num); + + let tag = NoteTag::with_account_target(target); + + let metadata = NoteMetadata::new(sender, note_type).with_tag(tag); + let vault = NoteAssets::new(assets)?; + + Ok(Note::new(vault, metadata, recipient)) +} diff --git a/crates/miden-testing/tests/agglayer/asset_conversion.rs b/crates/miden-testing/tests/agglayer/asset_conversion.rs index c37b1c206b..8a6a30ac3e 100644 --- 
a/crates/miden-testing/tests/agglayer/asset_conversion.rs +++ b/crates/miden-testing/tests/agglayer/asset_conversion.rs @@ -1,44 +1,36 @@ extern crate alloc; -use alloc::sync::Arc; - -use miden_agglayer::{agglayer_library, utils}; -use miden_assembly::{Assembler, DefaultSourceManager}; -use miden_core_lib::CoreLibrary; -use miden_processor::fast::ExecutionOutput; +use miden_agglayer::errors::{ + ERR_REMAINDER_TOO_LARGE, + ERR_SCALE_AMOUNT_EXCEEDED_LIMIT, + ERR_UNDERFLOW, + ERR_X_TOO_LARGE, +}; +use miden_agglayer::eth_types::amount::EthAmount; +use miden_processor::utils::packed_u32_elements_to_bytes; use miden_protocol::Felt; +use miden_protocol::asset::FungibleAsset; +use miden_protocol::errors::MasmError; use primitive_types::U256; +use rand::rngs::StdRng; +use rand::{Rng, SeedableRng}; -use super::test_utils::execute_program_with_default_host; - -/// Convert a Vec to a U256 -fn felts_to_u256(felts: Vec) -> U256 { - assert_eq!(felts.len(), 8, "expected exactly 8 felts"); - let array: [Felt; 8] = - [felts[0], felts[1], felts[2], felts[3], felts[4], felts[5], felts[6], felts[7]]; - let bytes = utils::felts_to_u256_bytes(array); - U256::from_little_endian(&bytes) -} +use super::test_utils::{assert_execution_fails_with, execute_masm_script}; -/// Convert the top 8 u32 values from the execution stack to a U256 -fn stack_to_u256(exec_output: &ExecutionOutput) -> U256 { - let felts: Vec = exec_output.stack[0..8].to_vec(); - felts_to_u256(felts) -} +// ================================================================================================ +// SCALE UP TESTS (Felt -> U256) +// ================================================================================================ -/// Helper function to test convert_felt_to_u256_scaled with given parameters -async fn test_convert_to_u256_helper( +/// Helper function to test scale_native_amount_to_u256 with given parameters +async fn test_scale_up_helper( miden_amount: Felt, scale_exponent: Felt, - expected_result_array: 
[u32; 8], - expected_result_u256: U256, + expected_result: EthAmount, ) -> anyhow::Result<()> { - let asset_conversion_lib = agglayer_library(); - let script_code = format!( " use miden::core::sys - use miden::agglayer::asset_conversion + use agglayer::common::asset_conversion begin push.{}.{} @@ -49,53 +41,35 @@ async fn test_convert_to_u256_helper( scale_exponent, miden_amount, ); - let program = Assembler::new(Arc::new(DefaultSourceManager::default())) - .with_dynamic_library(CoreLibrary::default()) - .unwrap() - .with_dynamic_library(asset_conversion_lib.clone()) - .unwrap() - .assemble_program(&script_code) - .unwrap(); - - let exec_output = execute_program_with_default_host(program, None).await?; - - // Extract the first 8 u32 values from the stack (the U256 representation) - let actual_result: [u32; 8] = [ - exec_output.stack[0].as_int() as u32, - exec_output.stack[1].as_int() as u32, - exec_output.stack[2].as_int() as u32, - exec_output.stack[3].as_int() as u32, - exec_output.stack[4].as_int() as u32, - exec_output.stack[5].as_int() as u32, - exec_output.stack[6].as_int() as u32, - exec_output.stack[7].as_int() as u32, - ]; + let exec_output = execute_masm_script(&script_code).await?; + let actual_felts: Vec = exec_output.stack[0..8].to_vec(); - let actual_result_u256 = stack_to_u256(&exec_output); + // to_elements() returns big-endian limb order with each limb byte-swapped (LE-interpreted + // from BE source bytes). The scale-up output is native u32 limbs in LE limb order, so we + // reverse the limbs and swap bytes within each u32 to match. 
+ let expected_felts: Vec = expected_result + .to_elements() + .into_iter() + .rev() + .map(|f| Felt::new((f.as_canonical_u64() as u32).swap_bytes() as u64)) + .collect(); - assert_eq!(actual_result, expected_result_array); - assert_eq!(actual_result_u256, expected_result_u256); + assert_eq!(actual_felts, expected_felts); Ok(()) } #[tokio::test] -async fn test_convert_to_u256_basic_examples() -> anyhow::Result<()> { +async fn test_scale_up_basic_examples() -> anyhow::Result<()> { // Test case 1: amount=1, no scaling (scale_exponent=0) - test_convert_to_u256_helper( - Felt::new(1), - Felt::new(0), - [1, 0, 0, 0, 0, 0, 0, 0], - U256::from(1u64), - ) - .await?; + test_scale_up_helper(Felt::new(1), Felt::new(0), EthAmount::from_uint_str("1").unwrap()) + .await?; // Test case 2: amount=1, scale to 1e18 (scale_exponent=18) - test_convert_to_u256_helper( + test_scale_up_helper( Felt::new(1), Felt::new(18), - [2808348672, 232830643, 0, 0, 0, 0, 0, 0], - U256::from_dec_str("1000000000000000000").unwrap(), + EthAmount::from_uint_str("1000000000000000000").unwrap(), ) .await?; @@ -103,87 +77,301 @@ async fn test_convert_to_u256_basic_examples() -> anyhow::Result<()> { } #[tokio::test] -async fn test_convert_to_u256_scaled_eth() -> anyhow::Result<()> { - // 100 units base 1e6 - let miden_amount = Felt::new(100_000_000); +async fn test_scale_up_realistic_amounts() -> anyhow::Result<()> { + // 100 units base 1e6, scale to 1e18 + test_scale_up_helper( + Felt::new(100_000_000), + Felt::new(12), + EthAmount::from_uint_str("100000000000000000000").unwrap(), + ) + .await?; - // scale to 1e18 - let target_scale = Felt::new(12); + // Large amount: 1e18 units scaled by 8 + test_scale_up_helper( + Felt::new(1000000000000000000), + Felt::new(8), + EthAmount::from_uint_str("100000000000000000000000000").unwrap(), + ) + .await?; - let asset_conversion_lib = agglayer_library(); + Ok(()) +} - let script_code = format!( - " +#[tokio::test] +async fn test_scale_up_exceeds_max_scale() { + // 
scale_exp = 19 should fail + let script_code = " use miden::core::sys - use miden::agglayer::asset_conversion + use agglayer::common::asset_conversion begin - push.{}.{} + push.19.1 exec.asset_conversion::scale_native_amount_to_u256 exec.sys::truncate_stack end - ", - target_scale, miden_amount, - ); + "; - let program = Assembler::new(Arc::new(DefaultSourceManager::default())) - .with_dynamic_library(CoreLibrary::default()) - .unwrap() - .with_dynamic_library(asset_conversion_lib.clone()) - .unwrap() - .assemble_program(&script_code) - .unwrap(); + assert_execution_fails_with(script_code, "maximum scaling factor is 18").await; +} - let exec_output = execute_program_with_default_host(program, None).await?; +// ================================================================================================ +// SCALE DOWN TESTS (U256 -> Felt) +// ================================================================================================ - let expected_result = U256::from_dec_str("100000000000000000000").unwrap(); - let actual_result = stack_to_u256(&exec_output); +/// Build MASM script for verify_u256_to_native_amount_conversion +fn build_scale_down_script(x: EthAmount, scale_exp: u32, y: u64) -> String { + let x_felts = x.to_elements(); + format!( + r#" + use miden::core::sys + use agglayer::common::asset_conversion + + begin + push.{}.{}.{}.{}.{}.{}.{}.{}.{}.{} + exec.asset_conversion::verify_u256_to_native_amount_conversion + exec.sys::truncate_stack + end + "#, + y, + scale_exp, + x_felts[7].as_canonical_u64(), + x_felts[6].as_canonical_u64(), + x_felts[5].as_canonical_u64(), + x_felts[4].as_canonical_u64(), + x_felts[3].as_canonical_u64(), + x_felts[2].as_canonical_u64(), + x_felts[1].as_canonical_u64(), + x_felts[0].as_canonical_u64(), + ) +} - assert_eq!(actual_result, expected_result); +/// Assert that scaling down succeeds with the correct result +async fn assert_scale_down_ok(x: EthAmount, scale: u32) -> anyhow::Result { + let y = 
x.scale_to_token_amount(scale).unwrap().as_canonical_u64(); + let script = build_scale_down_script(x, scale, y); + let output = execute_masm_script(&script).await?; + assert_eq!(output.stack.as_slice(), &[Felt::ZERO; 16], "expected empty stack"); + Ok(y) +} + +/// Assert that scaling down fails with the given y and expected error +async fn assert_scale_down_fails(x: EthAmount, scale: u32, y: u64, expected_error: MasmError) { + let script = build_scale_down_script(x, scale, y); + assert_execution_fails_with(&script, expected_error.message()).await; +} +/// Test that y-1 and y+1 both fail appropriately +async fn assert_y_plus_minus_one_behavior(x: EthAmount, scale: u32) -> anyhow::Result<()> { + let y = assert_scale_down_ok(x, scale).await?; + if y > 0 { + assert_scale_down_fails(x, scale, y - 1, ERR_REMAINDER_TOO_LARGE).await; + } + assert_scale_down_fails(x, scale, y + 1, ERR_UNDERFLOW).await; Ok(()) } #[tokio::test] -async fn test_convert_to_u256_scaled_large_amount() -> anyhow::Result<()> { - // 100,000,000 units (base 1e10) - let miden_amount = Felt::new(1000000000000000000); +async fn test_scale_down_basic_examples() -> anyhow::Result<()> { + let cases = [ + (EthAmount::from_uint_str("1000000000000000000").unwrap(), 10u32), + (EthAmount::from_uint_str("1000").unwrap(), 0u32), + (EthAmount::from_uint_str("10000000000000000000").unwrap(), 18u32), + ]; + + for (x, s) in cases { + assert_scale_down_ok(x, s).await?; + } + Ok(()) +} + +// ================================================================================================ +// FUZZING TESTS +// ================================================================================================ + +// Fuzz test that validates verify_u256_to_native_amount_conversion (U256 → Felt) +// with random realistic amounts for all scale exponents (0..=18). 
+#[tokio::test] +async fn test_scale_down_realistic_scenarios_fuzzing() -> anyhow::Result<()> { + const CASES_PER_SCALE: usize = 2; + const MAX_SCALE: u32 = 18; + + let mut rng = StdRng::seed_from_u64(42); + + let min_x = U256::from(10_000_000_000_000u64); // 1e13 + let desired_max_x = U256::from_dec_str("1000000000000000000000000").unwrap(); // 1e24 + let max_y = U256::from(FungibleAsset::MAX_AMOUNT); // 2^63 - 2^31 + + for scale in 0..=MAX_SCALE { + let scale_factor = U256::from(10u64).pow(U256::from(scale)); - // scale to base 1e18 - let scale_exponent = Felt::new(8); + // Ensure x always scales down into a y that fits the fungible-token bound. + let max_x = desired_max_x.min(max_y * scale_factor); - let asset_conversion_lib = agglayer_library(); + assert!(max_x > min_x, "max_x must exceed min_x for scale={scale}"); + // Sample x uniformly from [min_x, max_x). + let span: u128 = (max_x - min_x).try_into().expect("span fits in u128"); + + for _ in 0..CASES_PER_SCALE { + let offset: u128 = rng.random_range(0..span); + let x = EthAmount::from_u256(min_x + U256::from(offset)); + assert_scale_down_ok(x, scale).await?; + } + } + + Ok(()) +} + +// ================================================================================================ +// NEGATIVE TESTS +// ================================================================================================ + +#[tokio::test] +async fn test_scale_down_wrong_y_clean_case() -> anyhow::Result<()> { + let x = EthAmount::from_uint_str("10000000000000000000").unwrap(); + assert_y_plus_minus_one_behavior(x, 18).await +} + +#[tokio::test] +async fn test_scale_down_wrong_y_with_remainder() -> anyhow::Result<()> { + let x = EthAmount::from_uint_str("1500000000000000000").unwrap(); + assert_y_plus_minus_one_behavior(x, 18).await +} + +// ================================================================================================ +// NEGATIVE TESTS - BOUNDS +// 
================================================================================================ + +#[tokio::test] +async fn test_scale_down_exceeds_max_scale() { + let x = EthAmount::from_uint_str("1000").unwrap(); + let s = 19u32; + let y = 1u64; + assert_scale_down_fails(x, s, y, ERR_SCALE_AMOUNT_EXCEEDED_LIMIT).await; +} + +#[tokio::test] +async fn test_scale_down_x_too_large() { + // Construct x with upper limbs non-zero (>= 2^128) + let x = EthAmount::from_u256(U256::from(1u64) << 128); + let s = 0u32; + let y = 0u64; + assert_scale_down_fails(x, s, y, ERR_X_TOO_LARGE).await; +} + +// ================================================================================================ +// REMAINDER EDGE TEST +// ================================================================================================ + +#[tokio::test] +async fn test_scale_down_remainder_edge() -> anyhow::Result<()> { + // Force z = scale - 1: pick y=5, s=10, so scale=10^10 + // Set x = y*scale + (scale-1) = 5*10^10 + (10^10 - 1) = 59999999999 + let scale_exp = 10u32; + let scale = 10u64.pow(scale_exp); + let x_val = 5u64 * scale + (scale - 1); + let x = EthAmount::from_u256(U256::from(x_val)); + + assert_scale_down_ok(x, scale_exp).await?; + Ok(()) +} + +#[tokio::test] +async fn test_scale_down_remainder_exactly_scale_fails() { + // If remainder z = scale, it should fail + // Pick s=10, x = 6*scale (where scale = 10^10) + // The correct y should be 6, so providing y=5 should fail + let scale_exp = 10u32; + let scale = 10u64.pow(scale_exp); + let x = EthAmount::from_u256(U256::from(6u64 * scale)); + + // Calculate the correct y using scale_to_token_amount + let correct_y = x.scale_to_token_amount(scale_exp).unwrap().as_canonical_u64(); + assert_eq!(correct_y, 6); + + // Providing wrong_y = correct_y - 1 should fail with ERR_REMAINDER_TOO_LARGE + let wrong_y = correct_y - 1; + assert_scale_down_fails(x, scale_exp, wrong_y, ERR_REMAINDER_TOO_LARGE).await; +} + +// 
================================================================================================ +// INLINE SCALE DOWN TEST +// ================================================================================================ + +#[tokio::test] +async fn test_verify_scale_down_inline() -> anyhow::Result<()> { + // Test: Take 100 * 1e18 and scale to base 1e8 + // This means we divide by 1e10 (scale_exp = 10) + // x = 100 * 1e18 = 100000000000000000000 + // y = x / 1e10 = 10000000000 (100 * 1e8) + let x = EthAmount::from_uint_str("100000000000000000000").unwrap(); + let scale_exp = 10u32; + let y = x.scale_to_token_amount(scale_exp).unwrap().as_canonical_u64(); + + let x_felts = x.to_elements(); + + // Build the MASM script inline let script_code = format!( - " + r#" use miden::core::sys - use miden::agglayer::asset_conversion - + use agglayer::common::asset_conversion + begin - push.{}.{} - - exec.asset_conversion::scale_native_amount_to_u256 + # Push expected quotient y used for verification (not returned as an output) + push.{} + + # Push scale_exp + push.{} + + # Push x as 8 u32 limbs in the order expected by the verifier + push.{}.{}.{}.{}.{}.{}.{}.{} + + # Call the scale down procedure (verifies conversion and may panic on failure) + exec.asset_conversion::verify_u256_to_native_amount_conversion + + # Truncate stack so the program returns with no public outputs (Outputs: []) exec.sys::truncate_stack end - ", - scale_exponent, miden_amount, + "#, + y, + scale_exp, + x_felts[7].as_canonical_u64(), + x_felts[6].as_canonical_u64(), + x_felts[5].as_canonical_u64(), + x_felts[4].as_canonical_u64(), + x_felts[3].as_canonical_u64(), + x_felts[2].as_canonical_u64(), + x_felts[1].as_canonical_u64(), + x_felts[0].as_canonical_u64(), ); - let program = Assembler::new(Arc::new(DefaultSourceManager::default())) - .with_dynamic_library(CoreLibrary::default()) - .unwrap() - .with_dynamic_library(asset_conversion_lib.clone()) - .unwrap() - .assemble_program(&script_code) - 
.unwrap(); + // Execute the script - verify_u256_to_native_amount_conversion panics on invalid + // conversions, so successful execution is sufficient validation + execute_masm_script(&script_code).await?; - let exec_output = execute_program_with_default_host(program, None).await?; - - let expected_result = U256::from_dec_str("100000000000000000000000000").unwrap(); - let actual_result = stack_to_u256(&exec_output); - - assert_eq!(actual_result, expected_result); + Ok(()) +} +/// Exercises u128_sub_no_underflow when x > 2^64, so x has distinct high limbs (x2 != x3). +/// +/// The u128 subtraction splits each 128-bit operand into two 64-bit halves. This test +/// ensures the high-half subtraction and borrow propagation work correctly when x_high +/// is non-zero. +#[tokio::test] +async fn test_scale_down_high_limb_subtraction() -> anyhow::Result<()> { + let x_val = U256::from_dec_str("18999999999999999999").unwrap(); + + // Verify the u32 limb structure that makes this test meaningful: + // x = x0 + x1*2^32 + x2*2^64 + x3*2^96 + // x2 and x3 must differ - otherwise the high subtraction is trivially correct + // regardless of limb ordering. 
+ let x2 = ((x_val >> 64) & U256::from(u32::MAX)).as_u32(); + let x3 = ((x_val >> 96) & U256::from(u32::MAX)).as_u32(); + assert_eq!(x2, 1, "x2 must be non-zero for the high subtraction to be non-trivial"); + assert_eq!(x3, 0, "x3 must differ from x2"); + + let x = EthAmount::from_u256(x_val); + assert_scale_down_ok(x, 18).await?; Ok(()) } @@ -199,7 +387,7 @@ fn test_felts_to_u256_bytes_sequential_values() { Felt::new(7), Felt::new(8), ]; - let result = utils::felts_to_u256_bytes(limbs); + let result = packed_u32_elements_to_bytes(&limbs); assert_eq!(result.len(), 32); // Verify the byte layout: limbs are processed in little-endian order, each as little-endian u32 @@ -214,13 +402,13 @@ fn test_felts_to_u256_bytes_sequential_values() { fn test_felts_to_u256_bytes_edge_cases() { // Test case 1: All zeros (minimum) let limbs = [Felt::new(0); 8]; - let result = utils::felts_to_u256_bytes(limbs); + let result = packed_u32_elements_to_bytes(&limbs); assert_eq!(result.len(), 32); assert!(result.iter().all(|&b| b == 0)); // Test case 2: All max u32 values (maximum) let limbs = [Felt::new(u32::MAX as u64); 8]; - let result = utils::felts_to_u256_bytes(limbs); + let result = packed_u32_elements_to_bytes(&limbs); assert_eq!(result.len(), 32); assert!(result.iter().all(|&b| b == 255)); } diff --git a/crates/miden-testing/tests/agglayer/bridge_in.rs b/crates/miden-testing/tests/agglayer/bridge_in.rs index 173e6a8d57..16d023d192 100644 --- a/crates/miden-testing/tests/agglayer/bridge_in.rs +++ b/crates/miden-testing/tests/agglayer/bridge_in.rs @@ -1,228 +1,586 @@ extern crate alloc; -use core::slice; +use alloc::slice; +use alloc::string::String; -use miden_agglayer::claim_note::{ExitRoot, SmtNode}; +use anyhow::Context; +use miden_agglayer::claim_note::Keccak256Output; +use miden_agglayer::errors::ERR_CLAIM_ALREADY_SPENT; use miden_agglayer::{ ClaimNoteStorage, - EthAddressFormat, - EthAmount, - LeafData, - OutputNoteData, - ProofData, + ConfigAggBridgeNote, + ExitRoot, + 
SmtNode, + UpdateGerNote, + agglayer_library, create_claim_note, create_existing_agglayer_faucet, create_existing_bridge_account, }; use miden_protocol::Felt; use miden_protocol::account::Account; +use miden_protocol::account::auth::AuthScheme; use miden_protocol::asset::{Asset, FungibleAsset}; +use miden_protocol::crypto::SequentialCommit; use miden_protocol::crypto::rand::FeltRng; -use miden_protocol::note::{ - Note, - NoteAssets, - NoteMetadata, - NoteRecipient, - NoteStorage, - NoteTag, - NoteType, -}; -use miden_protocol::transaction::OutputNote; +use miden_protocol::note::NoteType; +use miden_protocol::testing::account_id::ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE; +use miden_protocol::transaction::RawOutputNote; use miden_standards::account::wallets::BasicWallet; -use miden_standards::note::StandardNote; -use miden_testing::{AccountState, Auth, MockChain}; +use miden_standards::code_builder::CodeBuilder; +use miden_standards::note::P2idNote; +use miden_standards::testing::account_component::IncrNonceAuthComponent; +use miden_standards::testing::mock_account::MockAccountExt; +use miden_testing::utils::create_p2id_note_exact; +use miden_testing::{AccountState, Auth, MockChain, TransactionContextBuilder}; +use miden_tx::utils::hex_to_bytes; use rand::Rng; -use super::test_utils::claim_note_test_inputs; +use super::test_utils::{ + ClaimDataSource, + MerkleProofVerificationFile, + SOLIDITY_MERKLE_PROOF_VECTORS, +}; + +// HELPER FUNCTIONS +// ================================================================================================ + +fn merkle_proof_verification_code( + index: usize, + merkle_paths: &MerkleProofVerificationFile, +) -> String { + let mut store_path_source = String::new(); + for height in 0..32 { + let path_node = merkle_paths.merkle_paths[index * 32 + height].as_str(); + let smt_node = SmtNode::from(hex_to_bytes(path_node).unwrap()); + let [node_lo, node_hi] = smt_node.to_words(); + store_path_source.push_str(&format!( + " + 
\tpush.{node_lo} mem_storew_le.{} dropw + \tpush.{node_hi} mem_storew_le.{} dropw + ", + height * 8, + height * 8 + 4 + )); + } + + let root = ExitRoot::from(hex_to_bytes(&merkle_paths.roots[index]).unwrap()); + let [root_lo, root_hi] = root.to_words(); + + let leaf = Keccak256Output::from(hex_to_bytes(&merkle_paths.leaves[index]).unwrap()); + let [leaf_lo, leaf_hi] = leaf.to_words(); + + format!( + r#" + use agglayer::bridge::bridge_in + + begin + {store_path_source} + + push.{root_lo} mem_storew_le.256 dropw + push.{root_hi} mem_storew_le.260 dropw + + push.256 + push.{index} + push.0 + push.{leaf_hi} + push.{leaf_lo} + + exec.bridge_in::verify_merkle_proof + assert.err="verification failed" + end + "# + ) +} -/// Tests the bridge-in flow: CLAIM note -> Aggfaucet (FPI to Bridge) -> P2ID note created. +/// Tests the bridge-in flow with the new 2-transaction architecture: +/// +/// TX0: CONFIG_AGG_BRIDGE → bridge (registers faucet + token address in registries) +/// TX1: UPDATE_GER → bridge (stores GER) +/// TX2: CLAIM → bridge (validates proof, creates MINT note) +/// TX3: MINT → aggfaucet (mints asset, creates P2ID note) +/// TX4: P2ID → destination (simulated case only) +/// +/// Parameterized over two claim data sources: +/// - [`ClaimDataSource::Real`]: uses real [`ProofData`] and [`LeafData`] from +/// `claim_asset_vectors_real_tx.json`, captured from an actual on-chain `claimAsset` transaction. +/// - [`ClaimDataSource::Simulated`]: uses locally generated [`ProofData`] and [`LeafData`] from +/// `claim_asset_vectors_local_tx.json`, produced by simulating a `bridgeAsset()` call. +/// +/// Note: Modifying anything in the real test vectors would invalidate the Merkle proof, +/// as the proof was computed for the original leaf data including the original destination. 
+#[rstest::rstest] +#[case::real(ClaimDataSource::Real)] +#[case::simulated(ClaimDataSource::Simulated)] +#[case::rollup(ClaimDataSource::Rollup)] #[tokio::test] -async fn test_bridge_in_claim_to_p2id() -> anyhow::Result<()> { +async fn test_bridge_in_claim_to_p2id(#[case] data_source: ClaimDataSource) -> anyhow::Result<()> { + use miden_agglayer::AggLayerBridge; + let mut builder = MockChain::builder(); - // CREATE BRIDGE ACCOUNT (with bridge_out component for MMR validation) + // CREATE BRIDGE ADMIN ACCOUNT (sends CONFIG_AGG_BRIDGE notes) + // -------------------------------------------------------------------------------------------- + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE GER MANAGER ACCOUNT (sends the UPDATE_GER note) + // -------------------------------------------------------------------------------------------- + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE BRIDGE ACCOUNT // -------------------------------------------------------------------------------------------- let bridge_seed = builder.rng_mut().draw_word(); - let bridge_account = create_existing_bridge_account(bridge_seed); + let bridge_account = + create_existing_bridge_account(bridge_seed, bridge_admin.id(), ger_manager.id()); builder.add_account(bridge_account.clone())?; + // GET CLAIM DATA FROM JSON (source depends on the test case) + // -------------------------------------------------------------------------------------------- + let (proof_data, leaf_data, ger, cgi_chain_hash) = data_source.get_data(); + // CREATE AGGLAYER FAUCET ACCOUNT (with agglayer_faucet component) + // Use the origin token address and network from the claim data. 
// -------------------------------------------------------------------------------------------- let token_symbol = "AGG"; let decimals = 8u8; - let max_supply = Felt::new(1000000); + let max_supply = Felt::new(FungibleAsset::MAX_AMOUNT); let agglayer_faucet_seed = builder.rng_mut().draw_word(); + let origin_token_address = leaf_data.origin_token_address; + let origin_network = leaf_data.origin_network; + let scale = 10u8; + let agglayer_faucet = create_existing_agglayer_faucet( agglayer_faucet_seed, token_symbol, decimals, max_supply, + Felt::ZERO, bridge_account.id(), + &origin_token_address, + origin_network, + scale, + leaf_data.metadata_hash, ); builder.add_account(agglayer_faucet.clone())?; - // CREATE USER ACCOUNT TO RECEIVE P2ID NOTE + // Get the destination account ID from the leaf data. + // This requires the destination_address to be in the embedded Miden AccountId format + // (first 4 bytes must be zero). + let destination_account_id = leaf_data + .destination_address + .to_account_id() + .expect("destination address is not an embedded Miden AccountId"); + + // For the simulated/rollup case, create the destination account so we can consume the P2ID note + let destination_account = if matches!( + data_source, + ClaimDataSource::Simulated | ClaimDataSource::Rollup + ) { + let dest = + Account::mock(ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE, IncrNonceAuthComponent); + // Ensure the mock account ID matches the destination embedded in the JSON test vector, + // since the claim note targets this account ID. 
+ assert_eq!( + dest.id(), + destination_account_id, + "mock destination account ID must match the destination_account_id from the claim data" + ); + builder.add_account(dest.clone())?; + Some(dest) + } else { + None + }; + + // CREATE SENDER ACCOUNT (for creating the claim note) // -------------------------------------------------------------------------------------------- - let user_account_builder = + let sender_account_builder = Account::builder(builder.rng_mut().random()).with_component(BasicWallet); - let user_account = builder.add_account_from_builder( + let sender_account = builder.add_account_from_builder( Auth::IncrNonce, - user_account_builder, + sender_account_builder, AccountState::Exists, )?; - // CREATE CLAIM NOTE WITH P2ID OUTPUT NOTE DETAILS + // CREATE CLAIM NOTE (now targets the bridge, not the faucet) // -------------------------------------------------------------------------------------------- - // Define amount values for the test - let claim_amount = 100u32; + // The P2ID serial number is derived from the PROOF_DATA_KEY (RPO hash of proof data) + let serial_num = proof_data.to_commitment(); - // Create CLAIM note using the new test inputs function - let ( - smt_proof_local_exit_root, - smt_proof_rollup_exit_root, - global_index, - mainnet_exit_root, - rollup_exit_root, - origin_network, - origin_token_address, - destination_network, - metadata, - ) = claim_note_test_inputs(); - - // Convert AccountId to destination address bytes in the test - let destination_address = EthAddressFormat::from_account_id(user_account.id()).into_bytes(); - - // Generate a serial number for the P2ID note - let serial_num = builder.rng_mut().draw_word(); - - // Convert amount to EthAmount for the LeafData - let amount_eth = EthAmount::from_u32(claim_amount); - - // Convert Vec<[u8; 32]> to [SmtNode; 32] for SMT proofs - let local_proof_array: [SmtNode; 32] = smt_proof_local_exit_root[0..32] - .iter() - .map(|&bytes| SmtNode::from(bytes)) - .collect::>() - 
.try_into() - .expect("should have exactly 32 elements"); - - let rollup_proof_array: [SmtNode; 32] = smt_proof_rollup_exit_root[0..32] - .iter() - .map(|&bytes| SmtNode::from(bytes)) - .collect::>() - .try_into() - .expect("should have exactly 32 elements"); - - let proof_data = ProofData { - smt_proof_local_exit_root: local_proof_array, - smt_proof_rollup_exit_root: rollup_proof_array, - global_index, - mainnet_exit_root: ExitRoot::from(mainnet_exit_root), - rollup_exit_root: ExitRoot::from(rollup_exit_root), - }; + // Calculate the scaled-down Miden amount using the faucet's scale factor + let miden_claim_amount = leaf_data + .amount + .scale_to_token_amount(scale as u32) + .expect("amount should scale successfully"); - let leaf_data = LeafData { - origin_network, - origin_token_address: EthAddressFormat::new(origin_token_address), - destination_network, - destination_address: EthAddressFormat::new(destination_address), - amount: amount_eth, - metadata, + let claim_inputs = ClaimNoteStorage { + proof_data, + leaf_data, + miden_claim_amount, }; - let output_note_data = OutputNoteData { - output_p2id_serial_num: serial_num, - target_faucet_account_id: agglayer_faucet.id(), - output_note_tag: NoteTag::with_account_target(user_account.id()), - }; - - let claim_inputs = ClaimNoteStorage { proof_data, leaf_data, output_note_data }; + let claim_note = create_claim_note( + claim_inputs, + bridge_account.id(), // Target the bridge, not the faucet + sender_account.id(), + builder.rng_mut(), + )?; - let claim_note = create_claim_note(claim_inputs, user_account.id(), builder.rng_mut())?; + // Add the claim note to the builder before building the mock chain + builder.add_output_note(RawOutputNote::Full(claim_note.clone())); - // Create P2ID note for the user account (similar to network faucet test) - let p2id_script = StandardNote::P2ID.script(); - let p2id_inputs = vec![user_account.id().suffix(), user_account.id().prefix().as_felt()]; - let note_storage = 
NoteStorage::new(p2id_inputs)?; - let p2id_recipient = NoteRecipient::new(serial_num, p2id_script.clone(), note_storage); + // CREATE CONFIG_AGG_BRIDGE NOTE (registers faucet + token address in bridge) + // -------------------------------------------------------------------------------------------- + let config_note = ConfigAggBridgeNote::create( + agglayer_faucet.id(), + &origin_token_address, + bridge_admin.id(), + bridge_account.id(), + builder.rng_mut(), + )?; + builder.add_output_note(RawOutputNote::Full(config_note.clone())); - // Add the claim note to the builder before building the mock chain - builder.add_output_note(OutputNote::Full(claim_note.clone())); + // CREATE UPDATE_GER NOTE WITH GLOBAL EXIT ROOT + // -------------------------------------------------------------------------------------------- + let update_ger_note = + UpdateGerNote::create(ger, ger_manager.id(), bridge_account.id(), builder.rng_mut())?; + builder.add_output_note(RawOutputNote::Full(update_ger_note.clone())); // BUILD MOCK CHAIN WITH ALL ACCOUNTS // -------------------------------------------------------------------------------------------- let mut mock_chain = builder.clone().build()?; + + // TX0: EXECUTE CONFIG_AGG_BRIDGE NOTE TO REGISTER FAUCET IN BRIDGE + // -------------------------------------------------------------------------------------------- + let config_tx_context = mock_chain + .build_tx_context(bridge_account.id(), &[config_note.id()], &[])? 
+ .build()?; + let config_executed = config_tx_context.execute().await?; + + mock_chain.add_pending_executed_transaction(&config_executed)?; mock_chain.prove_next_block()?; - // CREATE EXPECTED P2ID NOTE FOR VERIFICATION + // TX1: EXECUTE UPDATE_GER NOTE TO STORE GER IN BRIDGE ACCOUNT // -------------------------------------------------------------------------------------------- - let amount_felt = Felt::from(claim_amount); - let mint_asset: Asset = FungibleAsset::new(agglayer_faucet.id(), amount_felt.into())?.into(); - let output_note_tag = NoteTag::with_account_target(user_account.id()); - let expected_p2id_note = Note::new( - NoteAssets::new(vec![mint_asset])?, - NoteMetadata::new(agglayer_faucet.id(), NoteType::Public).with_tag(output_note_tag), - p2id_recipient, - ); + let update_ger_tx_context = mock_chain + .build_tx_context(bridge_account.id(), &[update_ger_note.id()], &[])? + .build()?; + let update_ger_executed = update_ger_tx_context.execute().await?; + + mock_chain.add_pending_executed_transaction(&update_ger_executed)?; + mock_chain.prove_next_block()?; + + // TX2: EXECUTE CLAIM NOTE AGAINST BRIDGE (validates proof, creates MINT note) + // -------------------------------------------------------------------------------------------- + let faucet_foreign_inputs = mock_chain.get_foreign_account_inputs(agglayer_faucet.id())?; + let claim_tx_context = mock_chain + .build_tx_context(bridge_account.id(), &[], &[claim_note])? 
+ .foreign_accounts(vec![faucet_foreign_inputs]) + .build()?; + + let claim_executed = claim_tx_context + .execute() + .await + .context("TX2: CLAIM note execution against bridge failed")?; + + // VERIFY CGI CHAIN HASH WAS SUCCESSFULLY UPDATED + // -------------------------------------------------------------------------------------------- + + let mut updated_bridge_account = bridge_account.clone(); + updated_bridge_account.apply_delta(claim_executed.account_delta())?; + + let actual_cgi_chain_hash = AggLayerBridge::cgi_chain_hash(&updated_bridge_account)?; + + assert_eq!(cgi_chain_hash, actual_cgi_chain_hash); - // EXECUTE CLAIM NOTE AGAINST AGGLAYER FAUCET (with FPI to Bridge) + // VERIFY MINT NOTE WAS CREATED BY THE BRIDGE // -------------------------------------------------------------------------------------------- - let foreign_account_inputs = mock_chain.get_foreign_account_inputs(bridge_account.id())?; + assert_eq!(claim_executed.output_notes().num_notes(), 1); + let mint_output_note = claim_executed.output_notes().get_note(0); + + // Verify the MINT note was sent by the bridge + assert_eq!(mint_output_note.metadata().sender(), bridge_account.id()); + assert_eq!(mint_output_note.metadata().note_type(), NoteType::Public); + + // Commit the CLAIM transaction and prove the block so the MINT note can be consumed + mock_chain.add_pending_executed_transaction(&claim_executed)?; + mock_chain.prove_next_block()?; - let tx_context = mock_chain - .build_tx_context(agglayer_faucet.id(), &[], &[claim_note])? - .foreign_accounts(vec![foreign_account_inputs]) + // TX3: EXECUTE MINT NOTE AGAINST AGGFAUCET (mints asset, creates P2ID note) + // -------------------------------------------------------------------------------------------- + let mint_tx_context = mock_chain + .build_tx_context(agglayer_faucet.id(), &[mint_output_note.id()], &[])? 
+ .add_note_script(P2idNote::script()) .build()?; - let executed_transaction = tx_context.execute().await?; + let mint_executed = mint_tx_context + .execute() + .await + .context("TX3: MINT note execution against faucet failed")?; - // VERIFY P2ID NOTE WAS CREATED + // VERIFY P2ID NOTE WAS CREATED BY THE FAUCET // -------------------------------------------------------------------------------------------- // Check that exactly one P2ID note was created by the faucet - assert_eq!(executed_transaction.output_notes().num_notes(), 1); - let output_note = executed_transaction.output_notes().get_note(0); - - // Verify the output note contains the minted fungible asset - let expected_asset = FungibleAsset::new(agglayer_faucet.id(), claim_amount.into())?; + assert_eq!(mint_executed.output_notes().num_notes(), 1); + let output_note = mint_executed.output_notes().get_note(0); // Verify note metadata properties assert_eq!(output_note.metadata().sender(), agglayer_faucet.id()); assert_eq!(output_note.metadata().note_type(), NoteType::Public); - assert_eq!(output_note.id(), expected_p2id_note.id()); - // Extract the full note from the OutputNote enum for detailed verification - let full_note = match output_note { - OutputNote::Full(note) => note, - _ => panic!("Expected OutputNote::Full variant for public note"), + // Extract and verify P2ID asset contents + let mut assets_iter = output_note.assets().iter_fungible(); + let p2id_asset = assets_iter.next().unwrap(); + + // Verify minted amount matches expected scaled value + assert_eq!( + Felt::new(p2id_asset.amount().inner()), + miden_claim_amount, + "asset amount does not match" + ); + + // Verify faucet ID matches agglayer_faucet (P2ID token issuer) + assert_eq!( + p2id_asset.faucet_id(), + agglayer_faucet.id(), + "P2ID asset faucet ID doesn't match agglayer_faucet: got {:?}, expected {:?}", + p2id_asset.faucet_id(), + agglayer_faucet.id() + ); + + // Verify full note ID construction + let expected_asset: Asset = + 
FungibleAsset::new(agglayer_faucet.id(), miden_claim_amount.as_canonical_u64()) + .unwrap() + .into(); + let expected_output_p2id_note = create_p2id_note_exact( + agglayer_faucet.id(), + destination_account_id, + vec![expected_asset], + NoteType::Public, + serial_num, + ) + .unwrap(); + + assert_eq!(RawOutputNote::Full(expected_output_p2id_note.clone()), *output_note); + + // TX4: CONSUME THE P2ID NOTE WITH THE DESTINATION ACCOUNT (simulated case only) + // -------------------------------------------------------------------------------------------- + // For the simulated case, we control the destination account and can verify the full + // end-to-end flow including P2ID consumption and balance updates. + if let Some(destination_account) = destination_account { + // Add the faucet transaction to the chain and prove the next block so the P2ID note is + // committed and can be consumed. + mock_chain.add_pending_executed_transaction(&mint_executed)?; + mock_chain.prove_next_block()?; + + // Execute the consume transaction for the destination account + let consume_tx_context = mock_chain + .build_tx_context( + destination_account.id(), + &[], + slice::from_ref(&expected_output_p2id_note), + )? + .build()?; + let consume_executed_transaction = consume_tx_context.execute().await?; + + // Verify the destination account received the minted asset + let mut destination_account = destination_account.clone(); + destination_account.apply_delta(consume_executed_transaction.account_delta())?; + + let balance = destination_account.vault().get_balance(agglayer_faucet.id())?; + assert_eq!( + balance, + miden_claim_amount.as_canonical_u64(), + "destination account balance does not match" + ); + } + Ok(()) +} + +/// Tests that consuming a CLAIM note with the same PROOF_DATA_KEY twice fails. +/// +/// This test verifies the nullifier tracking mechanism: +/// 1. Sets up the bridge (CONFIG + UPDATE_GER) +/// 2. Executes the first CLAIM note successfully +/// 3. 
Creates a second CLAIM note with the same proof data +/// 4. Attempts to execute the second CLAIM note and asserts it fails with "claim note has already +/// been spent" +#[tokio::test] +async fn test_duplicate_claim_note_rejected() -> anyhow::Result<()> { + let data_source = ClaimDataSource::Simulated; + let mut builder = MockChain::builder(); + + // CREATE BRIDGE ADMIN ACCOUNT + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE GER MANAGER ACCOUNT + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE BRIDGE ACCOUNT + let bridge_seed = builder.rng_mut().draw_word(); + let bridge_account = + create_existing_bridge_account(bridge_seed, bridge_admin.id(), ger_manager.id()); + builder.add_account(bridge_account.clone())?; + + // GET CLAIM DATA FROM JSON + let (proof_data, leaf_data, ger, _cgi_chain_hash) = data_source.get_data(); + + // CREATE AGGLAYER FAUCET ACCOUNT + let token_symbol = "AGG"; + let decimals = 8u8; + let max_supply = Felt::new(FungibleAsset::MAX_AMOUNT); + let agglayer_faucet_seed = builder.rng_mut().draw_word(); + + let origin_token_address = leaf_data.origin_token_address; + let origin_network = leaf_data.origin_network; + let scale = 10u8; + + let agglayer_faucet = create_existing_agglayer_faucet( + agglayer_faucet_seed, + token_symbol, + decimals, + max_supply, + Felt::ZERO, + bridge_account.id(), + &origin_token_address, + origin_network, + scale, + leaf_data.metadata_hash, + ); + builder.add_account(agglayer_faucet.clone())?; + + // Calculate the scaled-down Miden amount + let miden_claim_amount = leaf_data + .amount + .scale_to_token_amount(scale as u32) + .expect("amount should scale successfully"); + + // CREATE FIRST CLAIM NOTE + let claim_inputs_1 = ClaimNoteStorage { + proof_data: proof_data.clone(), + leaf_data: leaf_data.clone(), + miden_claim_amount, + }; + + let claim_note_1 
= create_claim_note( + claim_inputs_1, + bridge_account.id(), + bridge_admin.id(), + builder.rng_mut(), + )?; + builder.add_output_note(RawOutputNote::Full(claim_note_1.clone())); + + // CREATE SECOND CLAIM NOTE (same proof data = same PROOF_DATA_KEY) + let claim_inputs_2 = ClaimNoteStorage { + proof_data: proof_data.clone(), + leaf_data: leaf_data.clone(), + miden_claim_amount, }; - // Verify note structure and asset content - let expected_asset_obj = Asset::from(expected_asset); - assert_eq!(full_note, &expected_p2id_note); + let claim_note_2 = create_claim_note( + claim_inputs_2, + bridge_account.id(), + bridge_admin.id(), + builder.rng_mut(), + )?; + builder.add_output_note(RawOutputNote::Full(claim_note_2.clone())); + + // CREATE CONFIG_AGG_BRIDGE NOTE + let config_note = ConfigAggBridgeNote::create( + agglayer_faucet.id(), + &origin_token_address, + bridge_admin.id(), + bridge_account.id(), + builder.rng_mut(), + )?; + builder.add_output_note(RawOutputNote::Full(config_note.clone())); + + // CREATE UPDATE_GER NOTE + let update_ger_note = + UpdateGerNote::create(ger, ger_manager.id(), bridge_account.id(), builder.rng_mut())?; + builder.add_output_note(RawOutputNote::Full(update_ger_note.clone())); - assert!(full_note.assets().iter().any(|asset| asset == &expected_asset_obj)); + // BUILD MOCK CHAIN + let mut mock_chain = builder.clone().build()?; - // Apply the transaction to the mock chain - mock_chain.add_pending_executed_transaction(&executed_transaction)?; + // TX0: CONFIG_AGG_BRIDGE + let config_tx_context = mock_chain + .build_tx_context(bridge_account.id(), &[config_note.id()], &[])? 
+ .build()?; + let config_executed = config_tx_context.execute().await?; + mock_chain.add_pending_executed_transaction(&config_executed)?; mock_chain.prove_next_block()?; - // CONSUME THE OUTPUT NOTE WITH TARGET ACCOUNT - // -------------------------------------------------------------------------------------------- - // Consume the output note with target account - let mut user_account_mut = user_account.clone(); - let consume_tx_context = mock_chain - .build_tx_context(user_account_mut.clone(), &[], slice::from_ref(&expected_p2id_note))? + // TX1: UPDATE_GER + let update_ger_tx_context = mock_chain + .build_tx_context(bridge_account.id(), &[update_ger_note.id()], &[])? .build()?; - let consume_executed_transaction = consume_tx_context.execute().await?; + let update_ger_executed = update_ger_tx_context.execute().await?; + mock_chain.add_pending_executed_transaction(&update_ger_executed)?; + mock_chain.prove_next_block()?; - user_account_mut.apply_delta(consume_executed_transaction.account_delta())?; + // TX2: FIRST CLAIM (should succeed) + let faucet_foreign_inputs_1 = mock_chain.get_foreign_account_inputs(agglayer_faucet.id())?; + let claim_tx_context_1 = mock_chain + .build_tx_context(bridge_account.id(), &[], &[claim_note_1])? + .foreign_accounts(vec![faucet_foreign_inputs_1]) + .build()?; + let claim_executed_1 = claim_tx_context_1.execute().await?; + assert_eq!(claim_executed_1.output_notes().num_notes(), 1); - // Verify the account's vault now contains the expected fungible asset - let balance = user_account_mut.vault().get_balance(agglayer_faucet.id())?; - assert_eq!(balance, expected_asset.amount()); + mock_chain.add_pending_executed_transaction(&claim_executed_1)?; + mock_chain.prove_next_block()?; + // TX3: SECOND CLAIM WITH SAME PROOF_DATA_KEY (should fail) + let faucet_foreign_inputs_2 = mock_chain.get_foreign_account_inputs(agglayer_faucet.id())?; + let claim_tx_context_2 = mock_chain + .build_tx_context(bridge_account.id(), &[], &[claim_note_2])? 
+ .foreign_accounts(vec![faucet_foreign_inputs_2]) + .build()?; + let result = claim_tx_context_2.execute().await; + + assert!(result.is_err(), "Second claim with same PROOF_DATA_KEY should fail"); + let error_msg = result.unwrap_err().to_string(); + let expected_err_code = ERR_CLAIM_ALREADY_SPENT.code().to_string(); + assert!( + error_msg.contains(&expected_err_code), + "expected error code {expected_err_code} for 'claim note has already been spent', got: {error_msg}" + ); + + Ok(()) +} + +#[tokio::test] +async fn solidity_verify_merkle_proof_compatibility() -> anyhow::Result<()> { + let merkle_paths = &*SOLIDITY_MERKLE_PROOF_VECTORS; + + assert_eq!(merkle_paths.leaves.len(), merkle_paths.roots.len()); + assert_eq!(merkle_paths.leaves.len() * 32, merkle_paths.merkle_paths.len()); + + for leaf_index in 0..32 { + let source = merkle_proof_verification_code(leaf_index, merkle_paths); + + let tx_script = CodeBuilder::new() + .with_statically_linked_library(&agglayer_library())? + .compile_tx_script(source)?; + + TransactionContextBuilder::with_existing_mock_account() + .tx_script(tx_script.clone()) + .build()? 
+ .execute() + .await + .context(format!("failed to execute transaction with leaf index {leaf_index}"))?; + } Ok(()) } diff --git a/crates/miden-testing/tests/agglayer/bridge_out.rs b/crates/miden-testing/tests/agglayer/bridge_out.rs index b00af4417f..bcb963ffc3 100644 --- a/crates/miden-testing/tests/agglayer/bridge_out.rs +++ b/crates/miden-testing/tests/agglayer/bridge_out.rs @@ -1,157 +1,357 @@ extern crate alloc; -use miden_agglayer::errors::ERR_B2AGG_TARGET_ACCOUNT_MISMATCH; -use miden_agglayer::{B2AggNote, EthAddressFormat, create_existing_bridge_account}; +use miden_agglayer::errors::{ERR_B2AGG_TARGET_ACCOUNT_MISMATCH, ERR_FAUCET_NOT_REGISTERED}; +use miden_agglayer::{ + AggLayerBridge, + B2AggNote, + ConfigAggBridgeNote, + EthAddressFormat, + ExitRoot, + MetadataHash, + create_existing_agglayer_faucet, + create_existing_bridge_account, +}; use miden_crypto::rand::FeltRng; use miden_protocol::Felt; use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{AccountId, AccountIdVersion, AccountStorageMode, AccountType}; use miden_protocol::asset::{Asset, FungibleAsset}; -use miden_protocol::note::{NoteAssets, NoteTag, NoteType}; -use miden_protocol::transaction::OutputNote; +use miden_protocol::note::{NoteAssets, NoteScript, NoteType}; +use miden_protocol::transaction::RawOutputNote; use miden_standards::account::faucets::TokenMetadata; +use miden_standards::account::mint_policies::OwnerControlledInitConfig; use miden_standards::note::StandardNote; use miden_testing::{Auth, MockChain, assert_transaction_executor_error}; +use miden_tx::utils::hex_to_bytes; -/// Tests the B2AGG (Bridge to AggLayer) note script with bridge_out account component. +use super::test_utils::SOLIDITY_MMR_FRONTIER_VECTORS; + +/// Tests that 32 sequential B2AGG note consumptions match all 32 Solidity MMR roots. /// -/// This test flow: -/// 1. Creates a network faucet to provide assets -/// 2. 
Creates a bridge account with the bridge_out component (using network storage) -/// 3. Creates a B2AGG note with assets from the network faucet -/// 4. Executes the B2AGG note consumption via network transaction -/// 5. Consumes the BURN note +/// This test exercises the complete bridge-out lifecycle: +/// 1. Creates a bridge account (empty faucet registry) and an agglayer faucet with conversion +/// metadata (origin token address, network, scale) +/// 2. Registers the faucet in the bridge's faucet registry via a CONFIG_AGG_BRIDGE note +/// 3. Creates a B2AGG note with assets from the agglayer faucet +/// 4. Consumes the B2AGG note against the bridge account — the bridge's `bridge_out` procedure: +/// - Validates the faucet is registered via `convert_asset` +/// - Calls the faucet's `asset_to_origin_asset` via FPI to get the scaled amount, origin token +/// address, and origin network +/// - Writes the leaf data and computes the Keccak hash for the MMR +/// - Creates a BURN note addressed to the faucet +/// 5. Verifies the BURN note was created with the correct asset, tag, and script +/// 6. 
Consumes the BURN note with the faucet to burn the tokens #[tokio::test] -async fn test_bridge_out_consumes_b2agg_note() -> anyhow::Result<()> { - let mut builder = MockChain::builder(); - - // Create a network faucet owner account - let faucet_owner_account_id = AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, +async fn bridge_out_consecutive() -> anyhow::Result<()> { + let vectors = &*SOLIDITY_MMR_FRONTIER_VECTORS; + let note_count = 32usize; + assert_eq!(vectors.amounts.len(), note_count, "amount vectors should contain 32 entries"); + assert_eq!(vectors.roots.len(), note_count, "root vectors should contain 32 entries"); + assert_eq!( + vectors.destination_networks.len(), + note_count, + "destination network vectors should contain 32 entries" + ); + assert_eq!( + vectors.destination_addresses.len(), + note_count, + "destination address vectors should contain 32 entries" ); - // Create a network faucet to provide assets for the B2AGG note - let faucet = - builder.add_existing_network_faucet("AGG", 1000, faucet_owner_account_id, Some(100))?; - - // Create a bridge account (includes a `bridge_out` component tested here) - let mut bridge_account = create_existing_bridge_account(builder.rng_mut().draw_word()); - builder.add_account(bridge_account.clone())?; + let mut builder = MockChain::builder(); - // CREATE B2AGG NOTE WITH ASSETS - // -------------------------------------------------------------------------------------------- + // CREATE BRIDGE ADMIN ACCOUNT (sends CONFIG_AGG_BRIDGE notes) + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; - let amount = Felt::new(100); - let bridge_asset: Asset = FungibleAsset::new(faucet.id(), amount.into()).unwrap().into(); + // CREATE GER MANAGER ACCOUNT (not used in this test, but distinct from admin) + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + 
auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; - // Create note storage with destination network and address - let destination_network = 1u32; // Example network ID - let destination_address = "0x1234567890abcdef1122334455667788990011aa"; - let eth_address = - EthAddressFormat::from_hex(destination_address).expect("Valid Ethereum address"); + let mut bridge_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + bridge_admin.id(), + ger_manager.id(), + ); + builder.add_account(bridge_account.clone())?; - let assets = NoteAssets::new(vec![bridge_asset])?; + let expected_amounts = vectors + .amounts + .iter() + .map(|amount| amount.parse::().expect("valid amount decimal string")) + .collect::>(); + let total_burned: u64 = expected_amounts.iter().sum(); - // Create the B2AGG note using the helper - let b2agg_note = B2AggNote::create( - destination_network, - eth_address, - assets, + // CREATE AGGLAYER FAUCET ACCOUNT (with conversion metadata for FPI) + // -------------------------------------------------------------------------------------------- + let origin_token_address = EthAddressFormat::from_hex(&vectors.origin_token_address) + .expect("valid shared origin token address"); + let origin_network = 64u32; + let scale = 0u8; + let metadata_hash = MetadataHash::from_token_info( + &vectors.token_name, + &vectors.token_symbol, + vectors.token_decimals, + ); + let faucet = create_existing_agglayer_faucet( + builder.rng_mut().draw_word(), + &vectors.token_symbol, + vectors.token_decimals, + Felt::new(FungibleAsset::MAX_AMOUNT), + Felt::new(total_burned), bridge_account.id(), + &origin_token_address, + origin_network, + scale, + metadata_hash, + ); + builder.add_account(faucet.clone())?; + + // CONFIG_AGG_BRIDGE note to register the faucet in the bridge (sent by bridge admin) + let config_note = ConfigAggBridgeNote::create( faucet.id(), + &origin_token_address, + bridge_admin.id(), + bridge_account.id(), builder.rng_mut(), )?; + 
builder.add_output_note(RawOutputNote::Full(config_note.clone())); + + // CREATE ALL B2AGG NOTES UPFRONT (before building mock chain) + // -------------------------------------------------------------------------------------------- + let mut notes = Vec::with_capacity(note_count); + for (i, &amount) in expected_amounts.iter().enumerate().take(note_count) { + let destination_network = vectors.destination_networks[i]; + let eth_address = EthAddressFormat::from_hex(&vectors.destination_addresses[i]) + .expect("valid destination address"); + + let bridge_asset: Asset = FungibleAsset::new(faucet.id(), amount).unwrap().into(); + let note = B2AggNote::create( + destination_network, + eth_address, + NoteAssets::new(vec![bridge_asset])?, + bridge_account.id(), + faucet.id(), + builder.rng_mut(), + )?; + builder.add_output_note(RawOutputNote::Full(note.clone())); + notes.push(note); + } - // Add the B2AGG note to the mock chain - builder.add_output_note(OutputNote::Full(b2agg_note.clone())); let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; - // EXECUTE B2AGG NOTE AGAINST BRIDGE ACCOUNT (NETWORK TRANSACTION) + // STEP 1: REGISTER FAUCET VIA CONFIG_AGG_BRIDGE NOTE // -------------------------------------------------------------------------------------------- - let tx_context = mock_chain - .build_tx_context(bridge_account.id(), &[b2agg_note.id()], &[])? - .build()?; - let executed_transaction = tx_context.execute().await?; + let config_executed = mock_chain + .build_tx_context(bridge_account.id(), &[config_note.id()], &[])? + .build()? 
+ .execute() + .await?; + bridge_account.apply_delta(config_executed.account_delta())?; + mock_chain.add_pending_executed_transaction(&config_executed)?; + mock_chain.prove_next_block()?; - // VERIFY PUBLIC BURN NOTE WAS CREATED + // STEP 2: CONSUME 32 B2AGG NOTES AND VERIFY FRONTIER EVOLUTION + // -------------------------------------------------------------------------------------------- + let burn_note_script: NoteScript = StandardNote::BURN.script(); + let mut burn_note_ids = Vec::with_capacity(note_count); + + for (i, note) in notes.iter().enumerate() { + let foreign_account_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; + + let executed_tx = mock_chain + .build_tx_context(bridge_account.clone(), &[note.id()], &[])? + .add_note_script(burn_note_script.clone()) + .foreign_accounts(vec![foreign_account_inputs]) + .build()? + .execute() + .await?; + + assert_eq!( + executed_tx.output_notes().num_notes(), + 1, + "Expected one BURN note after consume #{}", + i + 1 + ); + let burn_note = match executed_tx.output_notes().get_note(0) { + RawOutputNote::Full(note) => note, + _ => panic!("Expected OutputNote::Full variant for BURN note"), + }; + burn_note_ids.push(burn_note.id()); + + let expected_asset = Asset::from(FungibleAsset::new(faucet.id(), expected_amounts[i])?); + assert!( + burn_note.assets().iter().any(|asset| asset == &expected_asset), + "BURN note after consume #{} should contain the bridged asset", + i + 1 + ); + assert_eq!( + burn_note.metadata().note_type(), + NoteType::Public, + "BURN note should be public" + ); + let attachment = burn_note.metadata().attachment(); + let network_target = miden_standards::note::NetworkAccountTarget::try_from(attachment) + .expect("BURN note attachment should be a valid NetworkAccountTarget"); + assert_eq!( + network_target.target_id(), + faucet.id(), + "BURN note attachment should target the faucet" + ); + assert_eq!( + burn_note.recipient().script().root(), + StandardNote::BURN.script_root(), + "BURN 
note should use the BURN script" + ); + + bridge_account.apply_delta(executed_tx.account_delta())?; + assert_eq!( + AggLayerBridge::read_let_num_leaves(&bridge_account), + (i + 1) as u64, + "LET leaf count should match consumed notes" + ); + + let expected_ler = + ExitRoot::new(hex_to_bytes(&vectors.roots[i]).expect("valid root hex")).to_elements(); + assert_eq!( + AggLayerBridge::read_local_exit_root(&bridge_account)?, + expected_ler, + "Local Exit Root after {} leaves should match the Solidity-generated root", + i + 1 + ); + + mock_chain.add_pending_executed_transaction(&executed_tx)?; + mock_chain.prove_next_block()?; + } + + // STEP 3: CONSUME ALL BURN NOTES WITH THE AGGLAYER FAUCET // -------------------------------------------------------------------------------------------- - // The bridge_out component should create a PUBLIC BURN note addressed to the faucet + let initial_token_supply = TokenMetadata::try_from(faucet.storage())?.token_supply(); assert_eq!( - executed_transaction.output_notes().num_notes(), - 1, - "Expected one BURN note to be created" + initial_token_supply, + Felt::new(total_burned), + "Initial issuance should match all pending burns" + ); + + let mut faucet = faucet; + for burn_note_id in burn_note_ids { + let burn_executed_tx = mock_chain + .build_tx_context(faucet.id(), &[burn_note_id], &[])? + .build()? 
+ .execute() + .await?; + assert_eq!( + burn_executed_tx.output_notes().num_notes(), + 0, + "Burn transaction should not create output notes" + ); + faucet.apply_delta(burn_executed_tx.account_delta())?; + mock_chain.add_pending_executed_transaction(&burn_executed_tx)?; + mock_chain.prove_next_block()?; + } + + let final_token_supply = TokenMetadata::try_from(faucet.storage())?.token_supply(); + assert_eq!( + final_token_supply, + Felt::new(initial_token_supply.as_canonical_u64() - total_burned), + "Token supply should decrease by the sum of 32 bridged amounts" ); - let output_note = executed_transaction.output_notes().get_note(0); + Ok(()) +} - // Extract the full note from the OutputNote enum - let burn_note = match output_note { - OutputNote::Full(note) => note, - _ => panic!("Expected OutputNote::Full variant for BURN note"), - }; +/// Tests that bridging out fails when the faucet is not registered in the bridge's registry. +/// +/// This test verifies the faucet allowlist check in bridge_out's `convert_asset` procedure: +/// 1. Creates a bridge account with an empty faucet registry (no faucets registered) +/// 2. Creates a B2AGG note with an asset from an agglayer faucet +/// 3. 
Attempts to consume the B2AGG note against the bridge — this should fail because +/// `convert_asset` checks the faucet registry and panics with ERR_FAUCET_NOT_REGISTERED when the +/// faucet is not found +#[tokio::test] +async fn test_bridge_out_fails_with_unregistered_faucet() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); - // Verify the BURN note is public - assert_eq!(burn_note.metadata().note_type(), NoteType::Public, "BURN note should be public"); + // CREATE BRIDGE ADMIN ACCOUNT + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; - // Verify the BURN note contains the bridged asset - let expected_asset = FungibleAsset::new(faucet.id(), amount.into())?; - let expected_asset_obj = Asset::from(expected_asset); - assert!( - burn_note.assets().iter().any(|asset| asset == &expected_asset_obj), - "BURN note should contain the bridged asset" - ); + // CREATE GER MANAGER ACCOUNT (not used in this test, but distinct from admin) + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; - assert_eq!( - burn_note.metadata().tag(), - NoteTag::with_account_target(faucet.id()), - "BURN note should have the correct tag" + // CREATE BRIDGE ACCOUNT (empty faucet registry — no faucets registered) + // -------------------------------------------------------------------------------------------- + let bridge_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + bridge_admin.id(), + ger_manager.id(), ); + builder.add_account(bridge_account.clone())?; - // Verify the BURN note uses the correct script - assert_eq!( - burn_note.recipient().script().root(), - StandardNote::BURN.script_root(), - "BURN note should use the BURN script" + // CREATE AGGLAYER FAUCET ACCOUNT (NOT registered in the bridge) + // -------------------------------------------------------------------------------------------- + let vectors = 
&*SOLIDITY_MMR_FRONTIER_VECTORS; + let origin_token_address = EthAddressFormat::new([0u8; 20]); + let metadata_hash = MetadataHash::from_token_info( + &vectors.token_name, + &vectors.token_symbol, + vectors.token_decimals, ); + let faucet = create_existing_agglayer_faucet( + builder.rng_mut().draw_word(), + &vectors.token_symbol, + vectors.token_decimals, + Felt::new(FungibleAsset::MAX_AMOUNT), + Felt::new(100), + bridge_account.id(), + &origin_token_address, + 0, // origin_network + 0, // scale + metadata_hash, + ); + builder.add_account(faucet.clone())?; - // Apply the delta to the bridge account - bridge_account.apply_delta(executed_transaction.account_delta())?; + // CREATE B2AGG NOTE WITH ASSETS FROM THE UNREGISTERED FAUCET + // -------------------------------------------------------------------------------------------- + let amount = Felt::new(100); + let bridge_asset: Asset = + FungibleAsset::new(faucet.id(), amount.as_canonical_u64()).unwrap().into(); - // Apply the transaction to the mock chain - mock_chain.add_pending_executed_transaction(&executed_transaction)?; - mock_chain.prove_next_block()?; + let destination_address = "0x1234567890abcdef1122334455667788990011aa"; + let eth_address = + EthAddressFormat::from_hex(destination_address).expect("valid Ethereum address"); - // CONSUME THE BURN NOTE WITH THE NETWORK FAUCET - // -------------------------------------------------------------------------------------------- - // Check the initial token issuance before burning - let initial_token_supply = TokenMetadata::try_from(faucet.storage())?.token_supply(); - assert_eq!(initial_token_supply, Felt::new(100), "Initial issuance should be 100"); + let b2agg_note = B2AggNote::create( + 1u32, // destination_network + eth_address, + NoteAssets::new(vec![bridge_asset])?, + bridge_account.id(), + faucet.id(), + builder.rng_mut(), + )?; - // Execute the BURN note against the network faucet - let burn_tx_context = - mock_chain.build_tx_context(faucet.id(), 
&[burn_note.id()], &[])?.build()?; - let burn_executed_transaction = burn_tx_context.execute().await?; + builder.add_output_note(RawOutputNote::Full(b2agg_note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; - // Verify the burn transaction was successful - no output notes should be created - assert_eq!( - burn_executed_transaction.output_notes().num_notes(), - 0, - "Burn transaction should not create output notes" - ); + // ATTEMPT TO BRIDGE OUT WITHOUT REGISTERING THE FAUCET (SHOULD FAIL) + // -------------------------------------------------------------------------------------------- + let foreign_account_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; - // Apply the delta to the faucet account and verify the token issuance decreased - let mut faucet = faucet; - faucet.apply_delta(burn_executed_transaction.account_delta())?; + let result = mock_chain + .build_tx_context(bridge_account.id(), &[b2agg_note.id()], &[])? + .foreign_accounts(vec![foreign_account_inputs]) + .build()? + .execute() + .await; - let final_token_supply = TokenMetadata::try_from(faucet.storage())?.token_supply(); - assert_eq!( - final_token_supply, - Felt::new(initial_token_supply.as_int() - amount.as_int()), - "Token issuance should decrease by the burned amount" - ); + assert_transaction_executor_error!(result, ERR_FAUCET_NOT_REGISTERED); Ok(()) } @@ -168,7 +368,7 @@ async fn test_bridge_out_consumes_b2agg_note() -> anyhow::Result<()> { /// 4. The same user account consumes the B2AGG note (triggering reclaim branch) /// 5. 
Verifies that assets are added back to the account and no BURN note is created #[tokio::test] -async fn test_b2agg_note_reclaim_scenario() -> anyhow::Result<()> { +async fn b2agg_note_reclaim_scenario() -> anyhow::Result<()> { let mut builder = MockChain::builder(); // Create a network faucet owner account @@ -180,33 +380,52 @@ async fn test_b2agg_note_reclaim_scenario() -> anyhow::Result<()> { ); // Create a network faucet to provide assets for the B2AGG note - let faucet = - builder.add_existing_network_faucet("AGG", 1000, faucet_owner_account_id, Some(100))?; + let faucet = builder.add_existing_network_faucet( + "AGG", + 1000, + faucet_owner_account_id, + Some(100), + OwnerControlledInitConfig::OwnerOnly, + )?; - // Create a bridge account (includes a `bridge_out` component tested here) - let bridge_account = create_existing_bridge_account(builder.rng_mut().draw_word()); + // Create a bridge admin account + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // Create a GER manager account (not used in this test, but distinct from admin) + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // Create a bridge account (includes a `bridge` component) + let bridge_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + bridge_admin.id(), + ger_manager.id(), + ); builder.add_account(bridge_account.clone())?; // Create a user account that will create and consume the B2AGG note - let mut user_account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let mut user_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; // CREATE B2AGG NOTE WITH USER ACCOUNT AS SENDER // -------------------------------------------------------------------------------------------- - let amount = Felt::new(50); - let bridge_asset: 
Asset = FungibleAsset::new(faucet.id(), amount.into()).unwrap().into(); + let bridge_asset: Asset = + FungibleAsset::new(faucet.id(), amount.as_canonical_u64()).unwrap().into(); - // Create note storage with destination network and address let destination_network = 1u32; let destination_address = "0x1234567890abcdef1122334455667788990011aa"; let eth_address = - EthAddressFormat::from_hex(destination_address).expect("Valid Ethereum address"); + EthAddressFormat::from_hex(destination_address).expect("valid Ethereum address"); let assets = NoteAssets::new(vec![bridge_asset])?; - // Create the B2AGG note with the USER ACCOUNT as the sender - // This is the key difference - the note sender will be the same as the consuming account + // Create the B2AGG note with the USER ACCOUNT as the sender. + // This is the key difference — the note sender will be the same as the consuming account. let b2agg_note = B2AggNote::create( destination_network, eth_address, @@ -216,8 +435,7 @@ async fn test_b2agg_note_reclaim_scenario() -> anyhow::Result<()> { builder.rng_mut(), )?; - // Add the B2AGG note to the mock chain - builder.add_output_note(OutputNote::Full(b2agg_note.clone())); + builder.add_output_note(RawOutputNote::Full(b2agg_note.clone())); let mut mock_chain = builder.build()?; // Store the initial asset balance of the user account @@ -232,7 +450,6 @@ async fn test_b2agg_note_reclaim_scenario() -> anyhow::Result<()> { // VERIFY NO BURN NOTE WAS CREATED (RECLAIM BRANCH) // -------------------------------------------------------------------------------------------- - // In the reclaim scenario, no BURN note should be created assert_eq!( executed_transaction.output_notes().num_notes(), 0, @@ -245,14 +462,12 @@ async fn test_b2agg_note_reclaim_scenario() -> anyhow::Result<()> { // VERIFY ASSETS WERE ADDED BACK TO THE ACCOUNT // -------------------------------------------------------------------------------------------- let final_balance = 
user_account.vault().get_balance(faucet.id()).unwrap_or(0u64); - let expected_balance = initial_balance + amount.as_int(); - assert_eq!( - final_balance, expected_balance, + final_balance, + initial_balance + amount.as_canonical_u64(), "User account should have received the assets back from the B2AGG note" ); - // Apply the transaction to the mock chain mock_chain.add_pending_executed_transaction(&executed_transaction)?; mock_chain.prove_next_block()?; @@ -273,7 +488,7 @@ async fn test_b2agg_note_reclaim_scenario() -> anyhow::Result<()> { /// 5. Attempts to consume the B2AGG note with the malicious account /// 6. Verifies that the transaction fails with ERR_B2AGG_TARGET_ACCOUNT_MISMATCH #[tokio::test] -async fn test_b2agg_note_non_target_account_cannot_consume() -> anyhow::Result<()> { +async fn b2agg_note_non_target_account_cannot_consume() -> anyhow::Result<()> { let mut builder = MockChain::builder(); // Create a network faucet owner account @@ -285,36 +500,59 @@ async fn test_b2agg_note_non_target_account_cannot_consume() -> anyhow::Result<( ); // Create a network faucet to provide assets for the B2AGG note - let faucet = - builder.add_existing_network_faucet("AGG", 1000, faucet_owner_account_id, Some(100))?; + let faucet = builder.add_existing_network_faucet( + "AGG", + 1000, + faucet_owner_account_id, + Some(100), + OwnerControlledInitConfig::OwnerOnly, + )?; + + // Create a bridge admin account + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // Create a GER manager account (not used in this test, but distinct from admin) + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; // Create a bridge account as the designated TARGET for the B2AGG note - let bridge_account = create_existing_bridge_account(builder.rng_mut().draw_word()); + let bridge_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + 
bridge_admin.id(), + ger_manager.id(), + ); builder.add_account(bridge_account.clone())?; // Create a user account as the SENDER of the B2AGG note - let sender_account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let sender_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; // Create a "malicious" account with a bridge interface - let malicious_account = create_existing_bridge_account(builder.rng_mut().draw_word()); + let malicious_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + bridge_admin.id(), + ger_manager.id(), + ); builder.add_account(malicious_account.clone())?; // CREATE B2AGG NOTE // -------------------------------------------------------------------------------------------- - let amount = Felt::new(50); - let bridge_asset: Asset = FungibleAsset::new(faucet.id(), amount.into()).unwrap().into(); + let bridge_asset: Asset = + FungibleAsset::new(faucet.id(), amount.as_canonical_u64()).unwrap().into(); - // Create note storage with destination network and address let destination_network = 1u32; let destination_address = "0x1234567890abcdef1122334455667788990011aa"; let eth_address = - EthAddressFormat::from_hex(destination_address).expect("Valid Ethereum address"); + EthAddressFormat::from_hex(destination_address).expect("valid Ethereum address"); let assets = NoteAssets::new(vec![bridge_asset])?; - // Create the B2AGG note + // Create the B2AGG note targeting the real bridge account let b2agg_note = B2AggNote::create( destination_network, eth_address, @@ -324,8 +562,7 @@ async fn test_b2agg_note_non_target_account_cannot_consume() -> anyhow::Result<( builder.rng_mut(), )?; - // Add the B2AGG note to the mock chain - builder.add_output_note(OutputNote::Full(b2agg_note.clone())); + builder.add_output_note(RawOutputNote::Full(b2agg_note.clone())); let mock_chain = builder.build()?; // ATTEMPT TO CONSUME B2AGG NOTE WITH 
MALICIOUS ACCOUNT (SHOULD FAIL) diff --git a/crates/miden-testing/tests/agglayer/config_bridge.rs b/crates/miden-testing/tests/agglayer/config_bridge.rs new file mode 100644 index 0000000000..607cc581e6 --- /dev/null +++ b/crates/miden-testing/tests/agglayer/config_bridge.rs @@ -0,0 +1,100 @@ +extern crate alloc; + +use miden_agglayer::{ + AggLayerBridge, + ConfigAggBridgeNote, + EthAddressFormat, + create_existing_bridge_account, +}; +use miden_protocol::Felt; +use miden_protocol::account::auth::AuthScheme; +use miden_protocol::account::{AccountId, AccountIdVersion, AccountStorageMode, AccountType}; +use miden_protocol::block::account_tree::AccountIdKey; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::transaction::RawOutputNote; +use miden_testing::{Auth, MockChain}; + +/// Tests that a CONFIG_AGG_BRIDGE note registers a faucet in the bridge's faucet registry. +/// +/// Flow: +/// 1. Create an admin (sender) account +/// 2. Create a bridge account with the admin as authorized operator +/// 3. Create a CONFIG_AGG_BRIDGE note carrying a faucet ID, sent by the admin +/// 4. Consume the note with the bridge account +/// 5. 
Verify the faucet is now in the bridge's faucet_registry map +#[tokio::test] +async fn test_config_agg_bridge_registers_faucet() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + // CREATE BRIDGE ADMIN ACCOUNT (note sender) + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE GER MANAGER ACCOUNT (not used in this test, but distinct from admin) + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE BRIDGE ACCOUNT (starts with empty faucet registry) + let bridge_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + bridge_admin.id(), + ger_manager.id(), + ); + builder.add_account(bridge_account.clone())?; + + // Use a dummy faucet ID to register (any valid AccountId will do) + let faucet_to_register = AccountId::dummy( + [42; 15], + AccountIdVersion::Version0, + AccountType::FungibleFaucet, + AccountStorageMode::Network, + ); + + // Verify the faucet is NOT in the registry before registration + let registry_slot_name = AggLayerBridge::faucet_registry_map_slot_name(); + let key = AccountIdKey::new(faucet_to_register).as_word(); + let value_before = bridge_account.storage().get_map_item(registry_slot_name, key)?; + assert_eq!( + value_before, + [Felt::ZERO; 4].into(), + "Faucet should not be in registry before registration" + ); + + // CREATE CONFIG_AGG_BRIDGE NOTE + // Use a dummy origin token address for this test + let origin_token_address = + EthAddressFormat::from_hex("0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48").unwrap(); + let config_note = ConfigAggBridgeNote::create( + faucet_to_register, + &origin_token_address, + bridge_admin.id(), + bridge_account.id(), + builder.rng_mut(), + )?; + + builder.add_output_note(RawOutputNote::Full(config_note.clone())); + let mock_chain = builder.build()?; + + // CONSUME THE CONFIG_AGG_BRIDGE NOTE WITH THE BRIDGE ACCOUNT + let 
tx_context = mock_chain + .build_tx_context(bridge_account.id(), &[config_note.id()], &[])? + .build()?; + let executed_transaction = tx_context.execute().await?; + + // VERIFY FAUCET IS NOW REGISTERED + let mut updated_bridge = bridge_account.clone(); + updated_bridge.apply_delta(executed_transaction.account_delta())?; + + let value_after = updated_bridge.storage().get_map_item(registry_slot_name, key)?; + // TODO: use a getter helper on AggLayerBridge once available + // (see https://github.com/0xMiden/protocol/issues/2548) + let expected_value = [Felt::ONE, Felt::ZERO, Felt::ZERO, Felt::ZERO].into(); + assert_eq!( + value_after, expected_value, + "Faucet should be registered with value [1, 0, 0, 0]" + ); + + Ok(()) +} diff --git a/crates/miden-testing/tests/agglayer/crypto_utils.rs b/crates/miden-testing/tests/agglayer/crypto_utils.rs deleted file mode 100644 index 392795086c..0000000000 --- a/crates/miden-testing/tests/agglayer/crypto_utils.rs +++ /dev/null @@ -1,248 +0,0 @@ -extern crate alloc; - -use alloc::string::String; -use alloc::sync::Arc; -use alloc::vec::Vec; - -use anyhow::Context; -use miden_agglayer::agglayer_library; -use miden_assembly::{Assembler, DefaultSourceManager}; -use miden_core_lib::CoreLibrary; -use miden_core_lib::handlers::bytes_to_packed_u32_felts; -use miden_core_lib::handlers::keccak256::KeccakPreimage; -use miden_crypto::FieldElement; -use miden_crypto::hash::keccak::Keccak256Digest; -use miden_processor::AdviceInputs; -use miden_protocol::utils::sync::LazyLock; -use miden_protocol::{Felt, Hasher, Word}; -use miden_standards::code_builder::CodeBuilder; -use miden_testing::TransactionContextBuilder; -use serde::Deserialize; - -use super::test_utils::{execute_program_with_default_host, keccak_digest_to_word_strings}; - -// LEAF_DATA_NUM_WORDS is defined as 8 in crypto_utils.masm, representing 8 Miden words of 4 felts -// each -const LEAF_DATA_FELTS: usize = 32; - -/// Merkle proof verification vectors JSON embedded at compile time 
from the Foundry-generated file. -const MERKLE_PROOF_VECTORS_JSON: &str = - include_str!("../../../miden-agglayer/solidity-compat/test-vectors/merkle_proof_vectors.json"); - -/// Deserialized Merkle proof vectors from Solidity DepositContractBase.sol -/// Uses parallel arrays for leaves and roots. For each element from leaves/roots there are 32 -/// elements from merkle_paths, which represent the merkle path for that leaf + root. -#[derive(Debug, Deserialize)] -struct MerkleProofVerificationFile { - leaves: Vec, - roots: Vec, - merkle_paths: Vec, -} - -/// Lazily parsed Merkle proof vectors from the JSON file. -static SOLIDITY_MERKLE_PROOF_VECTORS: LazyLock = LazyLock::new(|| { - serde_json::from_str(MERKLE_PROOF_VECTORS_JSON) - .expect("failed to parse Merkle proof vectors JSON") -}); - -fn u32_words_to_solidity_bytes32_hex(words: &[u64]) -> String { - assert_eq!(words.len(), 8, "expected 8 u32 words = 32 bytes"); - let mut out = [0u8; 32]; - - for (i, &w) in words.iter().enumerate() { - let le = (w as u32).to_le_bytes(); - out[i * 4..i * 4 + 4].copy_from_slice(&le); - } - - let mut s = String::from("0x"); - for b in out { - s.push_str(&format!("{:02x}", b)); - } - s -} - -// Helper: parse 0x-prefixed hex into a fixed-size byte array -fn hex_to_fixed(s: &str) -> [u8; N] { - let s = s.strip_prefix("0x").unwrap_or(s); - assert_eq!(s.len(), N * 2, "expected {} hex chars", N * 2); - let mut out = [0u8; N]; - for i in 0..N { - out[i] = u8::from_str_radix(&s[2 * i..2 * i + 2], 16).unwrap(); - } - out -} - -#[tokio::test] -async fn test_keccak_hash_get_leaf_value() -> anyhow::Result<()> { - let agglayer_lib = agglayer_library(); - - // === Values from hardhat test === - let leaf_type: u8 = 0; - let origin_network: u32 = 0; - let token_address: [u8; 20] = hex_to_fixed("0x1234567890123456789012345678901234567890"); - let destination_network: u32 = 1; - let destination_address: [u8; 20] = hex_to_fixed("0x0987654321098765432109876543210987654321"); - let amount_u64: u64 = 1; 
// 1e19 - let metadata_hash: [u8; 32] = - hex_to_fixed("0x2cdc14cacf6fec86a549f0e4d01e83027d3b10f29fa527c1535192c1ca1aac81"); - - // Expected hash value from Solidity implementation - let expected_hash = "0xf6825f6c59be2edf318d7251f4b94c0e03eb631b76a0e7b977fd8ed3ff925a3f"; - - // abi.encodePacked( - // uint8, uint32, address, uint32, address, uint256, bytes32 - // ) - let mut amount_u256_be = [0u8; 32]; - amount_u256_be[24..32].copy_from_slice(&amount_u64.to_be_bytes()); - - let mut input_u8 = Vec::with_capacity(113); - input_u8.push(leaf_type); - input_u8.extend_from_slice(&origin_network.to_be_bytes()); - input_u8.extend_from_slice(&token_address); - input_u8.extend_from_slice(&destination_network.to_be_bytes()); - input_u8.extend_from_slice(&destination_address); - input_u8.extend_from_slice(&amount_u256_be); - input_u8.extend_from_slice(&metadata_hash); - - let len_bytes = input_u8.len(); - assert_eq!(len_bytes, 113); - - let preimage = KeccakPreimage::new(input_u8.clone()); - let mut input_felts = bytes_to_packed_u32_felts(&input_u8); - // Pad to LEAF_DATA_FELTS (128 bytes) as expected by the downstream code - input_felts.resize(LEAF_DATA_FELTS, Felt::ZERO); - assert_eq!(input_felts.len(), LEAF_DATA_FELTS); - - // Arbitrary key to store input in advice map (in prod this is RPO(input_felts)) - let key: Word = Hasher::hash_elements(&input_felts); - let advice_inputs = AdviceInputs::default().with_map(vec![(key, input_felts)]); - - let source = format!( - r#" - use miden::core::sys - use miden::core::crypto::hashes::keccak256 - use miden::agglayer::crypto_utils - - begin - push.{key} - - exec.crypto_utils::get_leaf_value - exec.sys::truncate_stack - end - "# - ); - - let program = Assembler::new(Arc::new(DefaultSourceManager::default())) - .with_dynamic_library(CoreLibrary::default()) - .unwrap() - .with_dynamic_library(agglayer_lib.clone()) - .unwrap() - .assemble_program(&source) - .unwrap(); - - let exec_output = execute_program_with_default_host(program, 
Some(advice_inputs)).await?; - - let digest: Vec = exec_output.stack[0..8].iter().map(|f| f.as_int()).collect(); - let hex_digest = u32_words_to_solidity_bytes32_hex(&digest); - - let keccak256_digest: Vec = preimage.digest().as_ref().iter().map(Felt::as_int).collect(); - let keccak256_hex_digest = u32_words_to_solidity_bytes32_hex(&keccak256_digest); - - assert_eq!(digest, keccak256_digest); - assert_eq!(hex_digest, keccak256_hex_digest); - assert_eq!(hex_digest, expected_hash); - Ok(()) -} - -#[tokio::test] -async fn test_solidity_verify_merkle_proof_compatibility() -> anyhow::Result<()> { - let merkle_paths = &*SOLIDITY_MERKLE_PROOF_VECTORS; - - // Validate array lengths - assert_eq!(merkle_paths.leaves.len(), merkle_paths.roots.len()); - // paths have 32 nodes for each leaf/root, so the overall paths length should be 32 times longer - // than leaves/roots length - assert_eq!(merkle_paths.leaves.len() * 32, merkle_paths.merkle_paths.len()); - - for leaf_index in 0..32 { - let source = merkle_proof_verification_code(leaf_index, merkle_paths); - - let tx_script = CodeBuilder::new() - .with_statically_linked_library(&agglayer_library())? - .compile_tx_script(source)?; - - TransactionContextBuilder::with_existing_mock_account() - .tx_script(tx_script.clone()) - .build()? 
- .execute() - .await - .context(format!("failed to execute transaction with leaf index {leaf_index}"))?; - } - - Ok(()) -} - -// HELPER FUNCTIONS -// ================================================================================================ - -fn merkle_proof_verification_code( - index: usize, - merkle_paths: &MerkleProofVerificationFile, -) -> String { - // generate the code which stores the merkle path to the memory - let mut store_path_source = String::new(); - for height in 0..32 { - let path_node = - Keccak256Digest::try_from(merkle_paths.merkle_paths[index * 32 + height].as_str()) - .unwrap(); - let (node_hi, node_lo) = keccak_digest_to_word_strings(path_node); - // each iteration (each index in leaf/root vector) we rewrite the merkle path nodes, so the - // memory pointers for the merkle path and the expected root never change - store_path_source.push_str(&format!( - " -\tpush.[{node_hi}] mem_storew_be.{} dropw -\tpush.[{node_lo}] mem_storew_be.{} dropw - ", - height * 8, - height * 8 + 4 - )); - } - - // prepare the root for the provided index - let root = Keccak256Digest::try_from(merkle_paths.roots[index].as_str()).unwrap(); - let (root_hi, root_lo) = keccak_digest_to_word_strings(root); - - // prepare the leaf for the provided index - let leaf = Keccak256Digest::try_from(merkle_paths.leaves[index].as_str()).unwrap(); - let (leaf_hi, leaf_lo) = keccak_digest_to_word_strings(leaf); - - format!( - r#" - use miden::agglayer::crypto_utils - - begin - # store the merkle path to the memory (double word slots from 0 to 248) - {store_path_source} - # => [] - - # store the root to the memory (double word slot 256) - push.[{root_lo}] mem_storew_be.256 dropw - push.[{root_hi}] mem_storew_be.260 dropw - # => [] - - # prepare the stack for the `verify_merkle_proof` procedure - push.256 # expected root memory pointer - push.{index} # provided leaf index - push.0 # Merkle path memory pointer - push.[{leaf_hi}] push.[{leaf_lo}] # provided leaf value - # => 
[LEAF_VALUE_LO, LEAF_VALUE_HI, merkle_path_ptr, leaf_idx, expected_root_ptr] - - exec.crypto_utils::verify_merkle_proof - # => [verification_flag] - - assert.err="verification failed" - # => [] - end - "# - ) -} diff --git a/crates/miden-testing/tests/agglayer/faucet_helpers.rs b/crates/miden-testing/tests/agglayer/faucet_helpers.rs new file mode 100644 index 0000000000..d678e0761a --- /dev/null +++ b/crates/miden-testing/tests/agglayer/faucet_helpers.rs @@ -0,0 +1,66 @@ +extern crate alloc; + +use miden_agglayer::{ + AggLayerFaucet, + EthAddressFormat, + MetadataHash, + create_existing_agglayer_faucet, + create_existing_bridge_account, +}; +use miden_protocol::Felt; +use miden_protocol::account::auth::AuthScheme; +use miden_protocol::asset::FungibleAsset; +use miden_protocol::crypto::rand::FeltRng; +use miden_testing::{Auth, MockChain}; + +#[test] +fn test_faucet_helper_methods() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + let bridge_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + bridge_admin.id(), + ger_manager.id(), + ); + builder.add_account(bridge_account.clone())?; + + let token_symbol = "AGG"; + let decimals = 8u8; + let max_supply = Felt::new(FungibleAsset::MAX_AMOUNT); + let token_supply = Felt::new(123_456); + + let origin_token_address = + EthAddressFormat::from_hex("0x0102030405060708090a0b0c0d0e0f1011121314") + .expect("invalid token address"); + let origin_network = 42u32; + let scale = 6u8; + + let metadata_hash = MetadataHash::from_token_info(token_symbol, token_symbol, decimals); + + let faucet = create_existing_agglayer_faucet( + builder.rng_mut().draw_word(), + token_symbol, + decimals, + max_supply, + token_supply, + bridge_account.id(), + 
&origin_token_address, + origin_network, + scale, + metadata_hash, + ); + + assert_eq!(AggLayerFaucet::owner_account_id(&faucet)?, bridge_account.id()); + assert_eq!(AggLayerFaucet::origin_token_address(&faucet)?, origin_token_address); + assert_eq!(AggLayerFaucet::origin_network(&faucet)?, origin_network); + assert_eq!(AggLayerFaucet::scale(&faucet)?, scale); + + Ok(()) +} diff --git a/crates/miden-testing/tests/agglayer/global_index.rs b/crates/miden-testing/tests/agglayer/global_index.rs index 92c53ef61e..81809641ae 100644 --- a/crates/miden-testing/tests/agglayer/global_index.rs +++ b/crates/miden-testing/tests/agglayer/global_index.rs @@ -2,12 +2,13 @@ extern crate alloc; use alloc::sync::Arc; -use miden_agglayer::agglayer_library; use miden_agglayer::errors::{ ERR_BRIDGE_NOT_MAINNET, + ERR_BRIDGE_NOT_ROLLUP, ERR_LEADING_BITS_NON_ZERO, ERR_ROLLUP_INDEX_NON_ZERO, }; +use miden_agglayer::{GlobalIndex, agglayer_library}; use miden_assembly::{Assembler, DefaultSourceManager}; use miden_core_lib::CoreLibrary; use miden_processor::Program; @@ -15,17 +16,19 @@ use miden_testing::{ExecError, assert_execution_error}; use crate::agglayer::test_utils::execute_program_with_default_host; -fn assemble_process_global_index_program(global_index_be_u32_limbs: [u32; 8]) -> Program { - let [g0, g1, g2, g3, g4, g5, g6, g7] = global_index_be_u32_limbs; +fn assemble_process_global_index_program(global_index: GlobalIndex, proc_name: &str) -> Program { + // Convert GlobalIndex to 8 field elements (big-endian: [0]=MSB, [7]=LSB) + let elements = global_index.to_elements(); + let [g0, g1, g2, g3, g4, g5, g6, g7] = elements.try_into().unwrap(); let script_code = format!( r#" use miden::core::sys - use miden::agglayer::bridge_in + use agglayer::bridge::bridge_in begin push.{g7}.{g6}.{g5}.{g4}.{g3}.{g2}.{g1}.{g0} - exec.bridge_in::process_global_index_mainnet + exec.bridge_in::{proc_name} exec.sys::truncate_stack end "# @@ -40,23 +43,40 @@ fn 
assemble_process_global_index_program(global_index_be_u32_limbs: [u32; 8]) -> .unwrap() } +// MAINNET GLOBAL INDEX TESTS +// ================================================================================================ + #[tokio::test] async fn test_process_global_index_mainnet_returns_leaf_index() -> anyhow::Result<()> { - // 256-bit globalIndex encoded as 8 u32 limbs (big-endian): - // [top 191 bits = 0, mainnet flag = 1, rollup_index = 0, leaf_index = 2] - let global_index = [0, 0, 0, 0, 0, 1, 0, 2]; - let program = assemble_process_global_index_program(global_index); + // Global index format (32 bytes, big-endian like Solidity uint256): + // - bytes[0..20]: leading zeros + // - bytes[20..24]: mainnet_flag = 1 (BE u32) + // - bytes[24..28]: rollup_index = 0 (BE u32) + // - bytes[28..32]: leaf_index = 2 (BE u32) + let mut bytes = [0u8; 32]; + bytes[23] = 1; // mainnet flag = 1 (BE: LSB at byte 23) + bytes[31] = 2; // leaf index = 2 (BE: LSB at byte 31) + let program = assemble_process_global_index_program( + GlobalIndex::new(bytes), + "process_global_index_mainnet", + ); let exec_output = execute_program_with_default_host(program, None).await?; - assert_eq!(exec_output.stack[0].as_int(), 2); + assert_eq!(exec_output.stack[0].as_canonical_u64(), 2); Ok(()) } #[tokio::test] async fn test_process_global_index_mainnet_rejects_non_zero_leading_bits() { - let global_index = [1, 0, 0, 0, 0, 1, 0, 2]; - let program = assemble_process_global_index_program(global_index); + let mut bytes = [0u8; 32]; + bytes[3] = 1; // non-zero leading bits (BE: LSB of first u32 limb) + bytes[23] = 1; // mainnet flag = 1 + bytes[31] = 2; // leaf index = 2 + let program = assemble_process_global_index_program( + GlobalIndex::new(bytes), + "process_global_index_mainnet", + ); let err = execute_program_with_default_host(program, None).await.map_err(ExecError::new); assert_execution_error!(err, ERR_LEADING_BITS_NON_ZERO); @@ -64,9 +84,13 @@ async fn 
test_process_global_index_mainnet_rejects_non_zero_leading_bits() { #[tokio::test] async fn test_process_global_index_mainnet_rejects_flag_limb_upper_bits() { - // limb5 is the mainnet flag; only the lowest bit is allowed - let global_index = [0, 0, 0, 0, 0, 3, 0, 2]; - let program = assemble_process_global_index_program(global_index); + let mut bytes = [0u8; 32]; + bytes[23] = 3; // mainnet flag limb = 3 (upper bits set, only lowest bit allowed) + bytes[31] = 2; // leaf index = 2 + let program = assemble_process_global_index_program( + GlobalIndex::new(bytes), + "process_global_index_mainnet", + ); let err = execute_program_with_default_host(program, None).await.map_err(ExecError::new); assert_execution_error!(err, ERR_BRIDGE_NOT_MAINNET); @@ -74,9 +98,69 @@ async fn test_process_global_index_mainnet_rejects_flag_limb_upper_bits() { #[tokio::test] async fn test_process_global_index_mainnet_rejects_non_zero_rollup_index() { - let global_index = [0, 0, 0, 0, 0, 1, 7, 2]; - let program = assemble_process_global_index_program(global_index); + let mut bytes = [0u8; 32]; + bytes[23] = 1; // mainnet flag = 1 + bytes[27] = 7; // rollup index = 7 (BE: LSB at byte 27) + bytes[31] = 2; // leaf index = 2 + let program = assemble_process_global_index_program( + GlobalIndex::new(bytes), + "process_global_index_mainnet", + ); let err = execute_program_with_default_host(program, None).await.map_err(ExecError::new); assert_execution_error!(err, ERR_ROLLUP_INDEX_NON_ZERO); } + +// ROLLUP GLOBAL INDEX TESTS +// ================================================================================================ + +#[tokio::test] +async fn test_process_global_index_rollup_returns_leaf_and_rollup_index() -> anyhow::Result<()> { + // Global index for rollup: mainnet_flag=0, rollup_index=5, leaf_index=42 + let mut bytes = [0u8; 32]; + // mainnet flag = 0 (already zero) + bytes[27] = 5; // rollup index = 5 (BE: LSB at byte 27) + bytes[31] = 42; // leaf index = 42 (BE: LSB at byte 31) + let 
program = assemble_process_global_index_program( + GlobalIndex::new(bytes), + "process_global_index_rollup", + ); + + let exec_output = execute_program_with_default_host(program, None).await?; + + // process_global_index_rollup returns [leaf_index, rollup_index] + // stack[0] = leaf_index (top), stack[1] = rollup_index + assert_eq!(exec_output.stack[0].as_canonical_u64(), 42, "leaf_index should be 42"); + assert_eq!(exec_output.stack[1].as_canonical_u64(), 5, "rollup_index should be 5"); + Ok(()) +} + +#[tokio::test] +async fn test_process_global_index_rollup_rejects_non_zero_leading_bits() { + let mut bytes = [0u8; 32]; + bytes[3] = 1; // non-zero leading bits + bytes[27] = 5; // rollup index = 5 + bytes[31] = 42; // leaf index = 42 + let program = assemble_process_global_index_program( + GlobalIndex::new(bytes), + "process_global_index_rollup", + ); + + let err = execute_program_with_default_host(program, None).await.map_err(ExecError::new); + assert_execution_error!(err, ERR_LEADING_BITS_NON_ZERO); +} + +#[tokio::test] +async fn test_process_global_index_rollup_rejects_mainnet_flag() { + let mut bytes = [0u8; 32]; + bytes[23] = 1; // mainnet flag = 1 (should be 0 for rollup) + bytes[27] = 5; // rollup index = 5 + bytes[31] = 42; // leaf index = 42 + let program = assemble_process_global_index_program( + GlobalIndex::new(bytes), + "process_global_index_rollup", + ); + + let err = execute_program_with_default_host(program, None).await.map_err(ExecError::new); + assert_execution_error!(err, ERR_BRIDGE_NOT_ROLLUP); +} diff --git a/crates/miden-testing/tests/agglayer/leaf_utils.rs b/crates/miden-testing/tests/agglayer/leaf_utils.rs new file mode 100644 index 0000000000..d2f9d4d23a --- /dev/null +++ b/crates/miden-testing/tests/agglayer/leaf_utils.rs @@ -0,0 +1,199 @@ +extern crate alloc; + +use alloc::sync::Arc; +use alloc::vec::Vec; + +use miden_agglayer::agglayer_library; +use miden_agglayer::claim_note::Keccak256Output; +use miden_assembly::{Assembler, 
DefaultSourceManager}; +use miden_core_lib::CoreLibrary; +use miden_crypto::SequentialCommit; +use miden_processor::advice::AdviceInputs; +use miden_processor::utils::packed_u32_elements_to_bytes; +use miden_protocol::{Felt, Word}; +use miden_tx::utils::hex_to_bytes; + +use super::test_utils::{ + LEAF_VALUE_VECTORS_JSON, + LeafValueVector, + execute_program_with_default_host, +}; + +// HELPER FUNCTIONS +// ================================================================================================ + +fn felts_to_le_bytes(limbs: &[Felt]) -> Vec { + let mut bytes = Vec::with_capacity(limbs.len() * 4); + for limb in limbs.iter() { + let u32_value = limb.as_canonical_u64() as u32; + bytes.extend_from_slice(&u32_value.to_le_bytes()); + } + bytes +} + +// TESTS +// ================================================================================================ + +/// Test that the `pack_leaf_data` procedure produces the correct byte layout. +#[tokio::test] +async fn pack_leaf_data() -> anyhow::Result<()> { + let vector: LeafValueVector = + serde_json::from_str(LEAF_VALUE_VECTORS_JSON).expect("failed to parse leaf value vector"); + + let leaf_data = vector.to_leaf_data(); + + // Build expected bytes + let mut expected_packed_bytes: Vec = Vec::new(); + expected_packed_bytes.push(0u8); + expected_packed_bytes.extend_from_slice(&leaf_data.origin_network.to_be_bytes()); + expected_packed_bytes.extend_from_slice(leaf_data.origin_token_address.as_bytes()); + expected_packed_bytes.extend_from_slice(&leaf_data.destination_network.to_be_bytes()); + expected_packed_bytes.extend_from_slice(leaf_data.destination_address.as_bytes()); + expected_packed_bytes.extend_from_slice(leaf_data.amount.as_bytes()); + let metadata_hash_bytes: [u8; 32] = hex_to_bytes(&vector.metadata_hash).unwrap(); + expected_packed_bytes.extend_from_slice(&metadata_hash_bytes); + assert_eq!(expected_packed_bytes.len(), 113); + + let agglayer_lib = agglayer_library(); + let leaf_data_elements = 
leaf_data.to_elements(); + let leaf_data_bytes: Vec = packed_u32_elements_to_bytes(&leaf_data_elements); + assert_eq!( + leaf_data_bytes.len(), + 128, + "expected 8 words * 4 felts * 4 bytes per felt = 128 bytes" + ); + assert_eq!(leaf_data_bytes[116..], vec![0; 12], "the last 3 felts are pure padding"); + assert_eq!(leaf_data_bytes[3], expected_packed_bytes[0], "the first byte is the leaf type"); + assert_eq!( + leaf_data_bytes[4..8], + expected_packed_bytes[1..5], + "the next 4 bytes are the origin network" + ); + assert_eq!( + leaf_data_bytes[8..28], + expected_packed_bytes[5..25], + "the next 20 bytes are the origin token address" + ); + assert_eq!( + leaf_data_bytes[28..32], + expected_packed_bytes[25..29], + "the next 4 bytes are the destination network" + ); + assert_eq!( + leaf_data_bytes[32..52], + expected_packed_bytes[29..49], + "the next 20 bytes are the destination address" + ); + assert_eq!( + leaf_data_bytes[52..84], + expected_packed_bytes[49..81], + "the next 32 bytes are the amount" + ); + assert_eq!( + leaf_data_bytes[84..116], + expected_packed_bytes[81..113], + "the next 32 bytes are the metadata hash" + ); + + assert_eq!(leaf_data_bytes[3..116], expected_packed_bytes, "byte packing is as expected"); + + let key: Word = leaf_data.to_commitment(); + let advice_inputs = AdviceInputs::default().with_map(vec![(key, leaf_data_elements.clone())]); + + let source = format!( + r#" + use miden::core::mem + use agglayer::bridge::leaf_utils + + const LEAF_DATA_START_PTR = 0 + const CLAIM_LEAF_DATA_WORD_LEN = 8 + + begin + push.{key} + + adv.push_mapval + push.LEAF_DATA_START_PTR push.CLAIM_LEAF_DATA_WORD_LEN + exec.mem::pipe_preimage_to_memory drop + + exec.leaf_utils::pack_leaf_data + end + "# + ); + + let program = Assembler::new(Arc::new(DefaultSourceManager::default())) + .with_dynamic_library(CoreLibrary::default()) + .unwrap() + .with_dynamic_library(agglayer_lib.clone()) + .unwrap() + .assemble_program(&source) + .unwrap(); + + let exec_output = 
execute_program_with_default_host(program, Some(advice_inputs)).await?; + + // Read packed elements from memory at addresses 0..29 + let ctx = miden_processor::ContextId::root(); + + let packed_elements: Vec = (0..29u32) + .map(|addr| { + exec_output + .memory + .read_element(ctx, Felt::from(addr)) + .expect("address should be valid") + }) + .collect(); + + let packed_bytes: Vec = felts_to_le_bytes(&packed_elements); + + // push 3 more zero bytes for packing, since `pack_leaf_data` should leave us with the last 3 + // bytes set to 0 (prep for hashing, where padding bytes must be 0) + expected_packed_bytes.extend_from_slice(&[0u8; 3]); + + assert_eq!( + &packed_bytes, &expected_packed_bytes, + "Packed bytes don't match expected Solidity encoding" + ); + + Ok(()) +} + +#[tokio::test] +async fn get_leaf_value() -> anyhow::Result<()> { + let vector: LeafValueVector = + serde_json::from_str(LEAF_VALUE_VECTORS_JSON).expect("failed to parse leaf value vector"); + + let leaf_data = vector.to_leaf_data(); + let key: Word = leaf_data.to_commitment(); + let advice_inputs = AdviceInputs::default().with_map(vec![(key, leaf_data.to_elements())]); + + let source = format!( + r#" + use miden::core::sys + use agglayer::bridge::bridge_in + + begin + push.{key} + exec.bridge_in::get_leaf_value + exec.sys::truncate_stack + end + "# + ); + let agglayer_lib = agglayer_library(); + + let program = Assembler::new(Arc::new(DefaultSourceManager::default())) + .with_dynamic_library(CoreLibrary::default()) + .unwrap() + .with_dynamic_library(agglayer_lib.clone()) + .unwrap() + .assemble_program(&source) + .unwrap(); + + let exec_output = execute_program_with_default_host(program, Some(advice_inputs)).await?; + let computed_leaf_value: Vec = exec_output.stack[0..8].to_vec(); + let expected_leaf_value_bytes: [u8; 32] = + hex_to_bytes(&vector.leaf_value).expect("valid leaf value hex"); + let expected_leaf_value: Vec = + Keccak256Output::from(expected_leaf_value_bytes).to_elements(); + + 
assert_eq!(computed_leaf_value, expected_leaf_value); + Ok(()) +} diff --git a/crates/miden-testing/tests/agglayer/mmr_frontier.rs b/crates/miden-testing/tests/agglayer/mmr_frontier.rs index 367d221cc5..986520683e 100644 --- a/crates/miden-testing/tests/agglayer/mmr_frontier.rs +++ b/crates/miden-testing/tests/agglayer/mmr_frontier.rs @@ -1,15 +1,12 @@ use alloc::format; use alloc::string::ToString; -use miden_agglayer::agglayer_library; +use miden_agglayer::claim_note::SmtNode; +use miden_agglayer::{ExitRoot, agglayer_library}; use miden_crypto::hash::keccak::{Keccak256, Keccak256Digest}; use miden_protocol::utils::sync::LazyLock; use miden_standards::code_builder::CodeBuilder; use miden_testing::TransactionContextBuilder; -use serde::Deserialize; - -use super::test_utils::keccak_digest_to_word_strings; - // KECCAK MMR FRONTIER // ================================================================================================ @@ -76,7 +73,7 @@ impl KeccakMmrFrontier32 { async fn test_append_and_update_frontier() -> anyhow::Result<()> { let mut mmr_frontier = KeccakMmrFrontier32::<32>::new(); - let mut source = "use miden::agglayer::mmr_frontier32_keccak begin".to_string(); + let mut source = "use agglayer::bridge::mmr_frontier32_keccak begin".to_string(); for round in 0..32 { // construct the leaf from the hex representation of the round number @@ -84,7 +81,11 @@ async fn test_append_and_update_frontier() -> anyhow::Result<()> { let root = mmr_frontier.append_and_update_frontier(leaf); let num_leaves = mmr_frontier.num_leaves; - source.push_str(&leaf_assertion_code(leaf, root, num_leaves)); + source.push_str(&leaf_assertion_code( + SmtNode::new(leaf.into()), + ExitRoot::new(root.into()), + num_leaves, + )); } source.push_str("end"); @@ -108,11 +109,15 @@ async fn test_check_empty_mmr_root() -> anyhow::Result<()> { let zero_31 = *CANONICAL_ZEROS_32.get(31).expect("zeros should have 32 values total"); let empty_mmr_root = Keccak256::merge(&[zero_31, zero_31]); - let 
mut source = "use miden::agglayer::mmr_frontier32_keccak begin".to_string(); + let mut source = "use agglayer::bridge::mmr_frontier32_keccak begin".to_string(); for round in 1..=32 { // check that pushing the zero leaves into the MMR doesn't change its root - source.push_str(&leaf_assertion_code(zero_leaf, empty_mmr_root, round)); + source.push_str(&leaf_assertion_code( + SmtNode::new(zero_leaf.into()), + ExitRoot::new(empty_mmr_root.into()), + round, + )); } source.push_str("end"); @@ -137,39 +142,7 @@ async fn test_check_empty_mmr_root() -> anyhow::Result<()> { // Test vectors generated from: https://github.com/agglayer/agglayer-contracts // Run `make generate-solidity-test-vectors` to regenerate the test vectors. -/// Canonical zeros JSON embedded at compile time from the Foundry-generated file. -const CANONICAL_ZEROS_JSON: &str = - include_str!("../../../miden-agglayer/solidity-compat/test-vectors/canonical_zeros.json"); - -/// MMR frontier vectors JSON embedded at compile time from the Foundry-generated file. -const MMR_FRONTIER_VECTORS_JSON: &str = - include_str!("../../../miden-agglayer/solidity-compat/test-vectors/mmr_frontier_vectors.json"); - -/// Deserialized canonical zeros from Solidity DepositContractBase.sol -#[derive(Debug, Deserialize)] -struct CanonicalZerosFile { - canonical_zeros: Vec, -} - -/// Deserialized MMR frontier vectors from Solidity DepositContractBase.sol -/// Uses parallel arrays for leaves, roots, and counts instead of array of objects -#[derive(Debug, Deserialize)] -struct MmrFrontierVectorsFile { - leaves: Vec, - roots: Vec, - counts: Vec, -} - -/// Lazily parsed canonical zeros from the JSON file. -static SOLIDITY_CANONICAL_ZEROS: LazyLock = LazyLock::new(|| { - serde_json::from_str(CANONICAL_ZEROS_JSON).expect("Failed to parse canonical zeros JSON") -}); - -/// Lazily parsed MMR frontier vectors from the JSON file. 
-static SOLIDITY_MMR_FRONTIER_VECTORS: LazyLock = LazyLock::new(|| { - serde_json::from_str(MMR_FRONTIER_VECTORS_JSON) - .expect("failed to parse MMR frontier vectors JSON") -}); +use super::test_utils::{SOLIDITY_CANONICAL_ZEROS, SOLIDITY_MMR_FRONTIER_VECTORS}; /// Verifies that the Rust KeccakMmrFrontier32 produces the same canonical zeros as Solidity. #[test] @@ -222,27 +195,23 @@ fn test_solidity_mmr_frontier_compatibility() { // HELPER FUNCTIONS // ================================================================================================ -fn leaf_assertion_code( - leaf: Keccak256Digest, - expected_root: Keccak256Digest, - num_leaves: u32, -) -> String { - let (leaf_hi, leaf_lo) = keccak_digest_to_word_strings(leaf); - let (root_hi, root_lo) = keccak_digest_to_word_strings(expected_root); +fn leaf_assertion_code(leaf: SmtNode, expected_root: ExitRoot, num_leaves: u32) -> String { + let [leaf_lo, leaf_hi] = leaf.to_words(); + let [root_lo, root_hi] = expected_root.to_words(); format!( r#" # load the provided leaf onto the stack - push.[{leaf_hi}] - push.[{leaf_lo}] + push.{leaf_hi} + push.{leaf_lo} # add this leaf to the MMR frontier exec.mmr_frontier32_keccak::append_and_update_frontier # => [NEW_ROOT_LO, NEW_ROOT_HI, new_leaf_count] # assert the root correctness after the first leaf was added - push.[{root_lo}] - push.[{root_hi}] + push.{root_lo} + push.{root_hi} movdnw.3 # => [EXPECTED_ROOT_LO, NEW_ROOT_LO, NEW_ROOT_HI, EXPECTED_ROOT_HI, new_leaf_count] diff --git a/crates/miden-testing/tests/agglayer/mod.rs b/crates/miden-testing/tests/agglayer/mod.rs index f96326ffa3..5d84a0cc9d 100644 --- a/crates/miden-testing/tests/agglayer/mod.rs +++ b/crates/miden-testing/tests/agglayer/mod.rs @@ -1,8 +1,10 @@ pub mod asset_conversion; mod bridge_in; mod bridge_out; -mod crypto_utils; +mod config_bridge; +mod faucet_helpers; mod global_index; +mod leaf_utils; mod mmr_frontier; mod solidity_miden_address_conversion; pub mod test_utils; diff --git 
a/crates/miden-testing/tests/agglayer/solidity_miden_address_conversion.rs b/crates/miden-testing/tests/agglayer/solidity_miden_address_conversion.rs index 2083a9dd36..3b7ba599cc 100644 --- a/crates/miden-testing/tests/agglayer/solidity_miden_address_conversion.rs +++ b/crates/miden-testing/tests/agglayer/solidity_miden_address_conversion.rs @@ -5,8 +5,15 @@ use alloc::sync::Arc; use miden_agglayer::{EthAddressFormat, agglayer_library}; use miden_assembly::{Assembler, DefaultSourceManager}; use miden_core_lib::CoreLibrary; -use miden_processor::fast::{ExecutionOutput, FastProcessor}; -use miden_processor::{AdviceInputs, DefaultHost, ExecutionError, Program, StackInputs}; +use miden_processor::advice::AdviceInputs; +use miden_processor::{ + DefaultHost, + ExecutionError, + ExecutionOutput, + FastProcessor, + Program, + StackInputs, +}; use miden_protocol::Felt; use miden_protocol::account::AccountId; use miden_protocol::address::NetworkId; @@ -36,10 +43,11 @@ async fn execute_program_with_default_host( let asset_conversion_lib = agglayer_library(); host.load_library(asset_conversion_lib.mast_forest()).unwrap(); - let stack_inputs = StackInputs::new(vec![]).unwrap(); + let stack_inputs = StackInputs::new(&[]).unwrap(); let advice_inputs = AdviceInputs::default(); - let processor = FastProcessor::new_debug(stack_inputs.as_slice(), advice_inputs); + let processor = + FastProcessor::new(stack_inputs).with_advice(advice_inputs).with_debugging(true); processor.execute(&program, &mut host).await } @@ -109,31 +117,31 @@ async fn test_ethereum_address_to_account_id_in_masm() -> anyhow::Result<()> { let eth_address = EthAddressFormat::from_account_id(*original_account_id); let address_felts = eth_address.to_elements().to_vec(); - let le: Vec = address_felts + let limbs: Vec = address_felts .iter() .map(|f| { - let val = f.as_int(); + let val = f.as_canonical_u64(); assert!(val <= u32::MAX as u64, "felt value {} exceeds u32::MAX", val); val as u32 }) .collect(); - 
assert_eq!(le[4], 0, "test {}: expected msw limb (le[4]) to be zero", idx); + let limb0 = limbs[0]; + let limb1 = limbs[1]; + let limb2 = limbs[2]; + let limb3 = limbs[3]; + let limb4 = limbs[4]; - let addr0 = le[0]; - let addr1 = le[1]; - let addr2 = le[2]; - let addr3 = le[3]; - let addr4 = le[4]; + assert_eq!(limb0, 0, "test {}: expected msb limb (limb0) to be zero", idx); let account_id_felts: [Felt; 2] = (*original_account_id).into(); - let expected_prefix = account_id_felts[0].as_int(); - let expected_suffix = account_id_felts[1].as_int(); + let expected_prefix = account_id_felts[0]; + let expected_suffix = account_id_felts[1]; let script_code = format!( r#" use miden::core::sys - use miden::agglayer::eth_address + use agglayer::common::eth_address begin push.{}.{}.{}.{}.{} @@ -141,7 +149,7 @@ async fn test_ethereum_address_to_account_id_in_masm() -> anyhow::Result<()> { exec.sys::truncate_stack end "#, - addr4, addr3, addr2, addr1, addr0 + limb4, limb3, limb2, limb1, limb0 ); let program = Assembler::new(Arc::new(DefaultSourceManager::default())) @@ -154,14 +162,13 @@ async fn test_ethereum_address_to_account_id_in_masm() -> anyhow::Result<()> { let exec_output = execute_program_with_default_host(program).await?; - let actual_prefix = exec_output.stack[0].as_int(); - let actual_suffix = exec_output.stack[1].as_int(); + let actual_suffix = exec_output.stack[0]; + let actual_prefix = exec_output.stack[1]; assert_eq!(actual_prefix, expected_prefix, "test {}: prefix mismatch", idx); assert_eq!(actual_suffix, expected_suffix, "test {}: suffix mismatch", idx); - let reconstructed_account_id = - AccountId::try_from([Felt::new(actual_prefix), Felt::new(actual_suffix)])?; + let reconstructed_account_id = AccountId::try_from_elements(actual_suffix, actual_prefix)?; assert_eq!( reconstructed_account_id, *original_account_id, diff --git a/crates/miden-testing/tests/agglayer/test_utils.rs b/crates/miden-testing/tests/agglayer/test_utils.rs index b77d99e1bf..f68b253738 
100644 --- a/crates/miden-testing/tests/agglayer/test_utils.rs +++ b/crates/miden-testing/tests/agglayer/test_utils.rs @@ -1,27 +1,318 @@ extern crate alloc; -use alloc::vec; +use alloc::string::String; +use alloc::sync::Arc; use alloc::vec::Vec; -use miden_agglayer::agglayer_library; +use miden_agglayer::claim_note::{Keccak256Output, ProofData, SmtNode}; +use miden_agglayer::{ + EthAddressFormat, + EthAmount, + ExitRoot, + GlobalIndex, + LeafData, + MetadataHash, + agglayer_library, +}; +use miden_assembly::{Assembler, DefaultSourceManager}; use miden_core_lib::CoreLibrary; -use miden_crypto::hash::keccak::Keccak256Digest; -use miden_processor::fast::{ExecutionOutput, FastProcessor}; -use miden_processor::{AdviceInputs, DefaultHost, ExecutionError, Felt, Program, StackInputs}; +use miden_processor::advice::AdviceInputs; +use miden_processor::{ + DefaultHost, + ExecutionError, + ExecutionOutput, + FastProcessor, + Program, + StackInputs, +}; use miden_protocol::transaction::TransactionKernel; +use miden_protocol::utils::sync::LazyLock; +use miden_tx::utils::hex_to_bytes; +use serde::Deserialize; -/// Transforms the `[Keccak256Digest]` into two word strings: (`a, b, c, d`, `e, f, g, h`) -pub fn keccak_digest_to_word_strings(digest: Keccak256Digest) -> (String, String) { - let double_word = (*digest) - .chunks(4) - .map(|chunk| Felt::from(u32::from_le_bytes(chunk.try_into().unwrap())).to_string()) - .rev() - .collect::>(); +// EMBEDDED TEST VECTOR JSON FILES +// ================================================================================================ + +/// Claim asset test vectors JSON — contains both LeafData and ProofData from a real claimAsset +/// transaction. +const CLAIM_ASSET_VECTORS_JSON: &str = include_str!( + "../../../miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_real_tx.json" +); + +/// Bridge asset test vectors JSON — contains test data for an L1 bridgeAsset transaction. 
+const BRIDGE_ASSET_VECTORS_JSON: &str = include_str!( + "../../../miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_local_tx.json" +); + +/// Rollup deposit test vectors JSON — contains test data for a rollup deposit with two-level +/// Merkle proofs. +const ROLLUP_ASSET_VECTORS_JSON: &str = include_str!( + "../../../miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_rollup_tx.json" +); + +/// Leaf data test vectors JSON from the Foundry-generated file. +pub const LEAF_VALUE_VECTORS_JSON: &str = + include_str!("../../../miden-agglayer/solidity-compat/test-vectors/leaf_value_vectors.json"); + +/// Merkle proof verification vectors JSON from the Foundry-generated file. +pub const MERKLE_PROOF_VECTORS_JSON: &str = + include_str!("../../../miden-agglayer/solidity-compat/test-vectors/merkle_proof_vectors.json"); + +/// Canonical zeros JSON from the Foundry-generated file. +pub const CANONICAL_ZEROS_JSON: &str = + include_str!("../../../miden-agglayer/solidity-compat/test-vectors/canonical_zeros.json"); + +/// MMR frontier vectors JSON from the Foundry-generated file. +pub const MMR_FRONTIER_VECTORS_JSON: &str = + include_str!("../../../miden-agglayer/solidity-compat/test-vectors/mmr_frontier_vectors.json"); + +// SERDE HELPERS +// ================================================================================================ + +/// Deserializes a JSON value that may be either a number or a string into a `String`. +/// +/// Foundry's `vm.serializeUint` outputs JSON numbers for uint256 values. +/// This deserializer accepts both `"100"` (string) and `100` (number) forms. 
+fn deserialize_uint_to_string<'de, D>(deserializer: D) -> Result +where + D: serde::Deserializer<'de>, +{ + let value = serde_json::Value::deserialize(deserializer)?; + match value { + serde_json::Value::String(s) => Ok(s), + serde_json::Value::Number(n) => Ok(n.to_string()), + _ => Err(serde::de::Error::custom("expected a number or string for amount")), + } +} - (double_word[0..4].join(", "), double_word[4..8].join(", ")) +/// Deserializes a JSON array of values that may be either numbers or strings into `Vec`. +/// +/// Array-level counterpart of [`deserialize_uint_to_string`]. +fn deserialize_uint_vec_to_strings<'de, D>(deserializer: D) -> Result, D::Error> +where + D: serde::Deserializer<'de>, +{ + let values = Vec::::deserialize(deserializer)?; + values + .into_iter() + .map(|v| match v { + serde_json::Value::String(s) => Ok(s), + serde_json::Value::Number(n) => Ok(n.to_string()), + _ => Err(serde::de::Error::custom("expected a number or string for amount")), + }) + .collect() +} + +// TEST VECTOR TYPES +// ================================================================================================ + +/// Deserialized leaf value test vector from Solidity-generated JSON. +#[derive(Debug, Deserialize)] +pub struct LeafValueVector { + pub origin_network: u32, + pub origin_token_address: String, + pub destination_network: u32, + pub destination_address: String, + #[serde(deserialize_with = "deserialize_uint_to_string")] + pub amount: String, + pub metadata_hash: String, + #[allow(dead_code)] + pub leaf_value: String, } -/// Execute a program with default host and optional advice inputs +impl LeafValueVector { + /// Converts this test vector into a `LeafData` instance. 
+ pub fn to_leaf_data(&self) -> LeafData { + LeafData { + origin_network: self.origin_network, + origin_token_address: EthAddressFormat::from_hex(&self.origin_token_address) + .expect("valid origin token address hex"), + destination_network: self.destination_network, + destination_address: EthAddressFormat::from_hex(&self.destination_address) + .expect("valid destination address hex"), + amount: EthAmount::from_uint_str(&self.amount).expect("valid amount uint string"), + metadata_hash: MetadataHash::new( + hex_to_bytes(&self.metadata_hash).expect("valid metadata hash hex"), + ), + } + } +} + +/// Deserialized proof value test vector from Solidity-generated JSON. +/// Contains SMT proofs, exit roots, global index, and expected global exit root. +#[derive(Debug, Deserialize)] +pub struct ProofValueVector { + pub smt_proof_local_exit_root: Vec, + pub smt_proof_rollup_exit_root: Vec, + pub global_index: String, + pub mainnet_exit_root: String, + pub rollup_exit_root: String, + /// Expected global exit root: keccak256(mainnetExitRoot || rollupExitRoot) + #[allow(dead_code)] + pub global_exit_root: String, + pub claimed_global_index_hash_chain: String, +} + +impl ProofValueVector { + /// Converts this test vector into a `ProofData` instance. 
+ pub fn to_proof_data(&self) -> ProofData { + let smt_proof_local: [SmtNode; 32] = self + .smt_proof_local_exit_root + .iter() + .map(|s| SmtNode::new(hex_to_bytes(s).expect("valid smt proof hex"))) + .collect::>() + .try_into() + .expect("expected 32 SMT proof nodes for local exit root"); + + let smt_proof_rollup: [SmtNode; 32] = self + .smt_proof_rollup_exit_root + .iter() + .map(|s| SmtNode::new(hex_to_bytes(s).expect("valid smt proof hex"))) + .collect::>() + .try_into() + .expect("expected 32 SMT proof nodes for rollup exit root"); + + ProofData { + smt_proof_local_exit_root: smt_proof_local, + smt_proof_rollup_exit_root: smt_proof_rollup, + global_index: GlobalIndex::from_hex(&self.global_index) + .expect("valid global index hex"), + mainnet_exit_root: Keccak256Output::new( + hex_to_bytes(&self.mainnet_exit_root).expect("valid mainnet exit root hex"), + ), + rollup_exit_root: Keccak256Output::new( + hex_to_bytes(&self.rollup_exit_root).expect("valid rollup exit root hex"), + ), + } + } +} + +/// Deserialized claim asset test vector from Solidity-generated JSON. +/// Contains both LeafData and ProofData from a real claimAsset transaction. +#[derive(Debug, Deserialize)] +pub struct ClaimAssetVector { + #[serde(flatten)] + pub proof: ProofValueVector, + + #[serde(flatten)] + pub leaf: LeafValueVector, +} + +/// Deserialized Merkle proof vectors from Solidity DepositContractBase.sol. +/// Uses parallel arrays for leaves and roots. For each element from leaves/roots there are 32 +/// elements from merkle_paths, which represent the merkle path for that leaf + root. +#[derive(Debug, Deserialize)] +pub struct MerkleProofVerificationFile { + pub leaves: Vec, + pub roots: Vec, + pub merkle_paths: Vec, +} + +/// Deserialized canonical zeros from Solidity DepositContractBase.sol. +#[derive(Debug, Deserialize)] +pub struct CanonicalZerosFile { + pub canonical_zeros: Vec, +} + +/// Deserialized MMR frontier vectors from Solidity DepositContractV2. 
+/// +/// Each leaf is produced by `getLeafValue` using the same hardcoded fields as `bridge_out.masm` +/// (leafType=0, originNetwork=64), parametrised by +/// a shared `origin_token_address`, `amounts[i]`, per-index +/// `destination_networks[i]` / `destination_addresses[i]`, and +/// `metadataHash = keccak256(abi.encode(token_name, token_symbol, token_decimals))`. +/// +/// Amounts are serialized as uint256 values (JSON numbers). +#[derive(Debug, Deserialize)] +pub struct MmrFrontierVectorsFile { + pub leaves: Vec, + pub roots: Vec, + pub counts: Vec, + #[serde(deserialize_with = "deserialize_uint_vec_to_strings")] + pub amounts: Vec, + pub origin_token_address: String, + pub destination_networks: Vec, + pub destination_addresses: Vec, + #[allow(dead_code)] + pub token_name: String, + pub token_symbol: String, + pub token_decimals: u8, +} + +// LAZY-PARSED TEST VECTORS +// ================================================================================================ + +/// Lazily parsed claim asset test vector from the JSON file. +pub static CLAIM_ASSET_VECTOR: LazyLock = LazyLock::new(|| { + serde_json::from_str(CLAIM_ASSET_VECTORS_JSON) + .expect("failed to parse claim asset vectors JSON") +}); + +/// Lazily parsed bridge asset test vector from the JSON file (locally simulated L1 transaction). +pub static CLAIM_ASSET_VECTOR_LOCAL: LazyLock = LazyLock::new(|| { + serde_json::from_str(BRIDGE_ASSET_VECTORS_JSON) + .expect("failed to parse bridge asset vectors JSON") +}); + +/// Lazily parsed rollup deposit test vector from the JSON file. +pub static CLAIM_ASSET_VECTOR_ROLLUP: LazyLock = LazyLock::new(|| { + serde_json::from_str(ROLLUP_ASSET_VECTORS_JSON) + .expect("failed to parse rollup asset vectors JSON") +}); + +/// Lazily parsed Merkle proof vectors from the JSON file. 
+pub static SOLIDITY_MERKLE_PROOF_VECTORS: LazyLock = + LazyLock::new(|| { + serde_json::from_str(MERKLE_PROOF_VECTORS_JSON) + .expect("failed to parse Merkle proof vectors JSON") + }); + +/// Lazily parsed canonical zeros from the JSON file. +pub static SOLIDITY_CANONICAL_ZEROS: LazyLock = LazyLock::new(|| { + serde_json::from_str(CANONICAL_ZEROS_JSON).expect("failed to parse canonical zeros JSON") +}); + +/// Lazily parsed MMR frontier vectors from the JSON file. +pub static SOLIDITY_MMR_FRONTIER_VECTORS: LazyLock = LazyLock::new(|| { + serde_json::from_str(MMR_FRONTIER_VECTORS_JSON) + .expect("failed to parse MMR frontier vectors JSON") +}); + +// HELPER FUNCTIONS +// ================================================================================================ + +/// Identifies the source of claim data used in bridge-in tests. +#[derive(Debug, Clone, Copy)] +pub enum ClaimDataSource { + /// Real on-chain claimAsset data from claim_asset_vectors_real_tx.json. + Real, + /// Locally simulated bridgeAsset data from claim_asset_vectors_local_tx.json. + Simulated, + /// Rollup deposit data from claim_asset_vectors_rollup_tx.json. + Rollup, +} + +impl ClaimDataSource { + /// Returns the `(ProofData, LeafData, ExitRoot)` tuple for this data source. + pub fn get_data(self) -> (ProofData, LeafData, ExitRoot, Keccak256Output) { + let vector = match self { + ClaimDataSource::Real => &*CLAIM_ASSET_VECTOR, + ClaimDataSource::Simulated => &*CLAIM_ASSET_VECTOR_LOCAL, + ClaimDataSource::Rollup => &*CLAIM_ASSET_VECTOR_ROLLUP, + }; + let ger = ExitRoot::new( + hex_to_bytes(&vector.proof.global_exit_root).expect("valid global exit root hex"), + ); + let cgi_chain_hash = Keccak256Output::new( + hex_to_bytes(&vector.proof.claimed_global_index_hash_chain) + .expect("invalid CGI chain hash"), + ); + + (vector.proof.to_proof_data(), vector.leaf.to_leaf_data(), ger, cgi_chain_hash) + } +} + +/// Execute a program with a default host and optional advice inputs. 
pub async fn execute_program_with_default_host( program: Program, advice_inputs: Option, @@ -34,7 +325,6 @@ pub async fn execute_program_with_default_host( let std_lib = CoreLibrary::default(); host.load_library(std_lib.mast_forest()).unwrap(); - // Register handlers from std_lib for (event_name, handler) in std_lib.handlers() { host.register_handler(event_name, handler)?; } @@ -42,86 +332,38 @@ pub async fn execute_program_with_default_host( let agglayer_lib = agglayer_library(); host.load_library(agglayer_lib.mast_forest()).unwrap(); - let stack_inputs = StackInputs::new(vec![]).unwrap(); + let stack_inputs = StackInputs::new(&[]).unwrap(); let advice_inputs = advice_inputs.unwrap_or_default(); - let processor = FastProcessor::new_debug(stack_inputs.as_slice(), advice_inputs); + let processor = + FastProcessor::new(stack_inputs).with_advice(advice_inputs).with_debugging(true); processor.execute(&program, &mut host).await } -// TESTING HELPERS -// ================================================================================================ +/// Execute a MASM script with the default host +pub async fn execute_masm_script(script_code: &str) -> Result { + let agglayer_lib = agglayer_library(); -/// Type alias for the complex return type of claim_note_test_inputs. 
-/// -/// Contains native types for the new ClaimNoteParams structure: -/// - smt_proof_local_exit_root: `Vec<[u8; 32]>` (256 bytes32 values) -/// - smt_proof_rollup_exit_root: `Vec<[u8; 32]>` (256 bytes32 values) -/// - global_index: [u32; 8] -/// - mainnet_exit_root: [u8; 32] -/// - rollup_exit_root: [u8; 32] -/// - origin_network: u32 -/// - origin_token_address: [u8; 20] -/// - destination_network: u32 -/// - metadata: [u32; 8] -pub type ClaimNoteTestInputs = ( - Vec<[u8; 32]>, - Vec<[u8; 32]>, - [u32; 8], - [u8; 32], - [u8; 32], - u32, - [u8; 20], - u32, - [u32; 8], -); + let program = Assembler::new(Arc::new(DefaultSourceManager::default())) + .with_dynamic_library(CoreLibrary::default()) + .unwrap() + .with_dynamic_library(agglayer_lib) + .unwrap() + .assemble_program(script_code) + .unwrap(); -/// Returns dummy test inputs for creating CLAIM notes with native types. -/// -/// This is a convenience function for testing that provides realistic dummy data -/// for all the agglayer claimAsset function inputs using native types. 
-/// -/// # Returns -/// A tuple containing native types for the new ClaimNoteParams structure -pub fn claim_note_test_inputs() -> ClaimNoteTestInputs { - // Create SMT proofs with 32 bytes32 values each (SMT path depth) - let smt_proof_local_exit_root = vec![[0u8; 32]; 32]; - let smt_proof_rollup_exit_root = vec![[0u8; 32]; 32]; - // Global index format: [top 5 limbs = 0, mainnet_flag = 1, rollup_index = 0, leaf_index = 2] - let global_index = [0u32, 0, 0, 0, 0, 1, 0, 2]; - - let mainnet_exit_root: [u8; 32] = [ - 0xe3, 0xd3, 0x3b, 0x7e, 0x1f, 0x64, 0xb4, 0x04, 0x47, 0x2f, 0x53, 0xd1, 0xe4, 0x56, 0xc9, - 0xfa, 0x02, 0x47, 0x03, 0x13, 0x72, 0xa3, 0x08, 0x0f, 0x82, 0xf2, 0x57, 0xa2, 0x60, 0x8a, - 0x63, 0x1f, - ]; - - let rollup_exit_root: [u8; 32] = [ - 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, - 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, - 0x88, 0x99, - ]; - - let origin_network = 1u32; - - let origin_token_address: [u8; 20] = [ - 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, - 0x88, 0x99, 0xaa, 0xbb, 0xcc, - ]; - - let destination_network = 2u32; - - let metadata: [u32; 8] = [0; 8]; - - ( - smt_proof_local_exit_root, - smt_proof_rollup_exit_root, - global_index, - mainnet_exit_root, - rollup_exit_root, - origin_network, - origin_token_address, - destination_network, - metadata, - ) + execute_program_with_default_host(program, None).await +} + +/// Helper to assert execution fails with a specific error message +pub async fn assert_execution_fails_with(script_code: &str, expected_error: &str) { + let result = execute_masm_script(script_code).await; + assert!(result.is_err(), "Expected execution to fail but it succeeded"); + let error_msg = result.unwrap_err().to_string(); + assert!( + error_msg.contains(expected_error), + "Expected error containing '{}', got: {}", + expected_error, + error_msg + ); } diff --git 
a/crates/miden-testing/tests/agglayer/update_ger.rs b/crates/miden-testing/tests/agglayer/update_ger.rs index af754e21c0..ed8126a0d0 100644 --- a/crates/miden-testing/tests/agglayer/update_ger.rs +++ b/crates/miden-testing/tests/agglayer/update_ger.rs @@ -1,27 +1,76 @@ -use miden_agglayer::{ExitRoot, UpdateGerNote, create_existing_bridge_account}; -use miden_protocol::Word; -use miden_protocol::account::StorageSlotName; +extern crate alloc; + +use alloc::string::String; +use alloc::sync::Arc; +use alloc::vec::Vec; + +use miden_agglayer::{ + AggLayerBridge, + ExitRoot, + UpdateGerNote, + agglayer_library, + create_existing_bridge_account, +}; +use miden_assembly::{Assembler, DefaultSourceManager}; +use miden_core_lib::CoreLibrary; +use miden_core_lib::handlers::keccak256::KeccakPreimage; +use miden_crypto::Felt; +use miden_processor::utils::{bytes_to_packed_u32_elements, packed_u32_elements_to_bytes}; use miden_protocol::account::auth::AuthScheme; use miden_protocol::crypto::rand::FeltRng; -use miden_protocol::transaction::OutputNote; +use miden_protocol::transaction::RawOutputNote; +use miden_protocol::utils::sync::LazyLock; use miden_testing::{Auth, MockChain}; +use miden_tx::utils::hex_to_bytes; +use serde::Deserialize; + +use super::test_utils::execute_program_with_default_host; + +// EXIT ROOT TEST VECTORS +// ================================================================================================ +// Test vectors generated from Solidity's GlobalExitRootLib.calculateGlobalExitRoot +// Run `forge test --match-contract ExitRootsTestVectors` to regenerate. + +/// Exit roots JSON embedded at compile time from the Foundry-generated file. 
+const EXIT_ROOTS_JSON: &str = + include_str!("../../../miden-agglayer/solidity-compat/test-vectors/exit_roots.json"); + +/// Deserialized exit root vectors from Solidity GlobalExitRootLib +#[derive(Debug, Deserialize)] +struct ExitRootsFile { + mainnet_exit_roots: Vec, + rollup_exit_roots: Vec, + global_exit_roots: Vec, +} + +/// Lazily parsed exit root vectors from the JSON file. +static EXIT_ROOTS_VECTORS: LazyLock = LazyLock::new(|| { + serde_json::from_str(EXIT_ROOTS_JSON).expect("failed to parse exit roots JSON") +}); #[tokio::test] -async fn test_update_ger_note_updates_storage() -> anyhow::Result<()> { +async fn update_ger_note_updates_storage() -> anyhow::Result<()> { let mut builder = MockChain::builder(); + // CREATE BRIDGE ADMIN ACCOUNT (not used in this test, but distinct from GER manager) + // -------------------------------------------------------------------------------------------- + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE GER MANAGER ACCOUNT (note sender) + // -------------------------------------------------------------------------------------------- + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + // CREATE BRIDGE ACCOUNT // -------------------------------------------------------------------------------------------- let bridge_seed = builder.rng_mut().draw_word(); - let bridge_account = create_existing_bridge_account(bridge_seed); + let bridge_account = + create_existing_bridge_account(bridge_seed, bridge_admin.id(), ger_manager.id()); builder.add_account(bridge_account.clone())?; - // CREATE USER ACCOUNT (NOTE SENDER) - // -------------------------------------------------------------------------------------------- - let user_account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; - builder.add_account(user_account.clone())?; - // CREATE 
UPDATE_GER NOTE WITH 8 STORAGE ITEMS (NEW GER AS TWO WORDS) // -------------------------------------------------------------------------------------------- @@ -32,9 +81,9 @@ async fn test_update_ger_note_updates_storage() -> anyhow::Result<()> { ]; let ger = ExitRoot::from(ger_bytes); let update_ger_note = - UpdateGerNote::create(ger, user_account.id(), bridge_account.id(), builder.rng_mut())?; + UpdateGerNote::create(ger, ger_manager.id(), bridge_account.id(), builder.rng_mut())?; - builder.add_output_note(OutputNote::Full(update_ger_note.clone())); + builder.add_output_note(RawOutputNote::Full(update_ger_note.clone())); let mock_chain = builder.build()?; // EXECUTE UPDATE_GER NOTE AGAINST BRIDGE ACCOUNT @@ -44,23 +93,175 @@ async fn test_update_ger_note_updates_storage() -> anyhow::Result<()> { .build()?; let executed_transaction = tx_context.execute().await?; - // VERIFY GER WAS UPDATED IN STORAGE + // VERIFY GER HASH WAS STORED IN MAP // -------------------------------------------------------------------------------------------- let mut updated_bridge_account = bridge_account.clone(); updated_bridge_account.apply_delta(executed_transaction.account_delta())?; - let ger_upper = updated_bridge_account - .storage() - .get_item(&StorageSlotName::new("miden::agglayer::bridge::ger_upper")?) - .unwrap(); - let ger_lower = updated_bridge_account - .storage() - .get_item(&StorageSlotName::new("miden::agglayer::bridge::ger_lower")?) + let is_registered = AggLayerBridge::is_ger_registered(ger, updated_bridge_account)?; + assert!(is_registered, "GER was not registered in the bridge account"); + + Ok(()) +} + +/// Tests compute_ger with known mainnet and rollup exit roots. +/// +/// The GER (Global Exit Root) is computed as keccak256(mainnet_exit_root || rollup_exit_root). 
+#[tokio::test] +async fn compute_ger() -> anyhow::Result<()> { + let agglayer_lib = agglayer_library(); + let vectors = &*EXIT_ROOTS_VECTORS; + + for i in 0..vectors.mainnet_exit_roots.len() { + let mainnet_exit_root_bytes = + hex_to_bytes(vectors.mainnet_exit_roots[i].as_str()).expect("invalid hex string"); + let rollup_exit_root_bytes = + hex_to_bytes(vectors.rollup_exit_roots[i].as_str()).expect("invalid hex string"); + let expected_ger_bytes = + hex_to_bytes(vectors.global_exit_roots[i].as_str()).expect("invalid hex string"); + + // Convert expected GER to felts for comparison + let expected_ger_exit_root = ExitRoot::from(expected_ger_bytes); + let expected_ger_felts = expected_ger_exit_root.to_elements(); + + // Computed GER using keccak256 + let ger_preimage: Vec = + [mainnet_exit_root_bytes.as_ref(), rollup_exit_root_bytes.as_ref()].concat(); + let ger_preimage = KeccakPreimage::new(ger_preimage); + let computed_ger_felts: Vec = ger_preimage.digest().as_ref().to_vec(); + + assert_eq!( + computed_ger_felts, expected_ger_felts, + "Computed GER mismatch for test vector {}", + i + ); + + // Convert exit roots to packed u32 felts for memory initialization + let mainnet_felts = ExitRoot::from(mainnet_exit_root_bytes).to_elements(); + let rollup_felts = ExitRoot::from(rollup_exit_root_bytes).to_elements(); + + // Build memory initialization: mainnet at ptr 0, rollup at ptr 8 + let mem_init: Vec = mainnet_felts + .iter() + .chain(rollup_felts.iter()) + .enumerate() + .map(|(idx, f)| format!("push.{} mem_store.{}", f.as_canonical_u64(), idx)) + .collect(); + let mem_init_code = mem_init.join("\n"); + + let source = format!( + r#" + use miden::core::sys + use agglayer::bridge::bridge_in + + begin + # Initialize memory with exit roots + {mem_init_code} + + # Call compute_ger with pointer to exit roots + push.0 + exec.bridge_in::compute_ger + exec.sys::truncate_stack + end + "# + ); + + let program = Assembler::new(Arc::new(DefaultSourceManager::default())) + 
.with_dynamic_library(CoreLibrary::default()) + .unwrap() + .with_dynamic_library(agglayer_lib.clone()) + .unwrap() + .assemble_program(&source) + .unwrap(); + + let exec_output = execute_program_with_default_host(program, None).await?; + + let result_digest: Vec = exec_output.stack[0..8].to_vec(); + + assert_eq!(result_digest, expected_ger_felts, "GER mismatch for test vector {}", i); + } + + Ok(()) +} + +/// Tests compute_ger with known mainnet and rollup exit roots. +/// +/// The GER (Global Exit Root) is computed as keccak256(mainnet_exit_root || rollup_exit_root). +#[tokio::test] +async fn test_compute_ger_basic() -> anyhow::Result<()> { + let agglayer_lib = agglayer_library(); + + // Define test exit roots (32 bytes each) + let mainnet_exit_root: [u8; 32] = [ + 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, + 0x77, 0x88, + ]; + + let rollup_exit_root: [u8; 32] = [ + 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, + 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, + ]; + + // Concatenate the two roots (64 bytes total) + let mut ger_preimage = Vec::with_capacity(64); + ger_preimage.extend_from_slice(&mainnet_exit_root); + ger_preimage.extend_from_slice(&rollup_exit_root); + + // Compute expected GER using keccak256 + let expected_ger_preimage = KeccakPreimage::new(ger_preimage.clone()); + let expected_ger_felts: [Felt; 8] = expected_ger_preimage.digest().as_ref().try_into().unwrap(); + + let ger_bytes: [u8; 32] = packed_u32_elements_to_bytes(&expected_ger_felts).try_into().unwrap(); + + let ger = ExitRoot::from(ger_bytes); + // sanity check + assert_eq!(ger.to_elements(), expected_ger_felts); + + // Convert exit roots to packed u32 felts for memory initialization + let mainnet_felts = bytes_to_packed_u32_elements(&mainnet_exit_root); 
+ let rollup_felts = bytes_to_packed_u32_elements(&rollup_exit_root); + + // Build memory initialization: mainnet at ptr 0, rollup at ptr 8 + let mem_init: Vec = mainnet_felts + .iter() + .chain(rollup_felts.iter()) + .enumerate() + .map(|(i, f)| format!("push.{} mem_store.{}", f.as_canonical_u64(), i)) + .collect(); + let mem_init_code = mem_init.join("\n"); + + let source = format!( + r#" + use miden::core::sys + use agglayer::bridge::bridge_in + + begin + # Initialize memory with exit roots + {mem_init_code} + + # Call compute_ger with pointer to exit roots + push.0 + exec.bridge_in::compute_ger + exec.sys::truncate_stack + end + "# + ); + + let program = Assembler::new(Arc::new(DefaultSourceManager::default())) + .with_dynamic_library(CoreLibrary::default()) + .unwrap() + .with_dynamic_library(agglayer_lib.clone()) + .unwrap() + .assemble_program(&source) .unwrap(); - let expected_lower: Word = ger.to_elements()[0..4].try_into().unwrap(); - let expected_upper: Word = ger.to_elements()[4..8].try_into().unwrap(); - assert_eq!(ger_upper, expected_upper); - assert_eq!(ger_lower, expected_lower); + + let exec_output = execute_program_with_default_host(program, None).await?; + + let result_digest: Vec = exec_output.stack[0..8].to_vec(); + + assert_eq!(result_digest, expected_ger_felts); Ok(()) } diff --git a/crates/miden-testing/tests/auth/hybrid_multisig.rs b/crates/miden-testing/tests/auth/hybrid_multisig.rs index 87f12d16cc..4682a48f42 100644 --- a/crates/miden-testing/tests/auth/hybrid_multisig.rs +++ b/crates/miden-testing/tests/auth/hybrid_multisig.rs @@ -1,5 +1,5 @@ -use miden_processor::AdviceInputs; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::advice::AdviceInputs; +use miden_processor::crypto::random::RandomCoin; use miden_protocol::account::auth::{AuthScheme, AuthSecretKey, PublicKey}; use miden_protocol::account::{ Account, @@ -14,19 +14,17 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, 
ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, }; -use miden_protocol::transaction::OutputNote; +use miden_protocol::transaction::RawOutputNote; use miden_protocol::vm::AdviceMap; use miden_protocol::{Felt, Hasher, Word}; use miden_standards::account::auth::AuthMultisig; use miden_standards::account::components::multisig_library; -use miden_standards::account::interface::{AccountInterface, AccountInterfaceExt}; use miden_standards::account::wallets::BasicWallet; use miden_standards::code_builder::CodeBuilder; -use miden_standards::errors::standards::ERR_TX_ALREADY_EXECUTED; use miden_standards::note::P2idNote; use miden_standards::testing::account_interface::get_public_keys_from_account; use miden_testing::utils::create_spawn_note; -use miden_testing::{Auth, MockChainBuilder, assert_transaction_executor_error}; +use miden_testing::{Auth, MockChainBuilder}; use miden_tx::TransactionExecutorError; use miden_tx::auth::{BasicAuthenticator, SigningInputs, TransactionAuthenticator}; use rand::SeedableRng; @@ -54,7 +52,7 @@ fn setup_keys_and_authenticators( for index in 0..num_approvers { let sec_key = if index % 2 == 0 { - AuthSecretKey::new_falcon512_rpo_with_rng(&mut rng) + AuthSecretKey::new_falcon512_poseidon2_with_rng(&mut rng) } else { AuthSecretKey::new_ecdsa_k256_keccak_with_rng(&mut rng) }; @@ -65,7 +63,7 @@ fn setup_keys_and_authenticators( // Alternate scheme IDs between Falcon (2) and ECDSA (1) if index % 2 == 0 { - auth_schemes.push(AuthScheme::Falcon512Rpo); + auth_schemes.push(AuthScheme::Falcon512Poseidon2); } else { auth_schemes.push(AuthScheme::EcdsaK256Keccak); } @@ -89,7 +87,7 @@ fn create_multisig_account( ) -> anyhow::Result { let approvers = approvers .iter() - .map(|(pub_key, auth_scheme)| (pub_key.to_commitment().into(), *auth_scheme)) + .map(|(pub_key, auth_scheme)| (pub_key.to_commitment(), *auth_scheme)) .collect(); let multisig_account = AccountBuilder::new([0; 32]) @@ -156,7 +154,7 @@ async fn test_multisig_2_of_2_with_note_creation() -> 
anyhow::Result<()> { // Execute transaction without signatures - should fail let tx_context_init = mock_chain .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .auth_args(salt) .build()?; @@ -179,7 +177,7 @@ async fn test_multisig_2_of_2_with_note_creation() -> anyhow::Result<()> { // Execute transaction with signatures - should succeed let tx_context_execute = mock_chain .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) .add_signature(public_keys[0].to_commitment(), msg, sig_1) .add_signature(public_keys[1].to_commitment(), msg, sig_2) .auth_args(salt) @@ -196,7 +194,7 @@ async fn test_multisig_2_of_2_with_note_creation() -> anyhow::Result<()> { multisig_account .vault() .get_balance(AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET)?)?, - multisig_starting_balance - output_note_asset.unwrap_fungible().amount() + multisig_starting_balance - output_note_asset.unwrap_fungible().amount().inner() ); Ok(()) @@ -285,89 +283,6 @@ async fn test_multisig_2_of_4_all_signer_combinations() -> anyhow::Result<()> { Ok(()) } -/// Tests multisig replay protection to prevent transaction re-execution. -/// -/// This test verifies that a 2-of-3 multisig account properly prevents replay attacks -/// by rejecting attempts to execute the same transaction twice. The first execution -/// should succeed with valid signatures, but the second attempt with identical -/// parameters should fail with ERR_TX_ALREADY_EXECUTED. 
-/// -/// **Roles:** -/// - 3 Approvers (2 signers required) -/// - 1 Multisig Contract -#[tokio::test] -async fn test_multisig_replay_protection() -> anyhow::Result<()> { - // Setup keys and authenticators (3 approvers, but only 2 signers) - let (_secret_keys, auth_schemes, public_keys, authenticators) = - setup_keys_and_authenticators(3, 2)?; - - let approvers = public_keys - .iter() - .zip(auth_schemes.iter()) - .map(|(pk, scheme)| (pk.clone(), *scheme)) - .collect::>(); - - // Create 2/3 multisig account - let multisig_account = create_multisig_account(2, &approvers, 20, vec![])?; - - let mut mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) - .unwrap() - .build() - .unwrap(); - - let salt = Word::from([Felt::new(3); 4]); - - // Execute transaction without signatures first to get tx summary - let tx_context_init = mock_chain - .build_tx_context(multisig_account.id(), &[], &[])? - .auth_args(salt) - .build()?; - - let tx_summary = match tx_context_init.execute().await.unwrap_err() { - TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, - error => panic!("expected abort with tx effects: {error:?}"), - }; - - // Get signatures from 2 of the 3 approvers - let msg = tx_summary.as_ref().to_commitment(); - let tx_summary = SigningInputs::TransactionSummary(tx_summary); - - let sig_1 = authenticators[0] - .get_signature(public_keys[0].to_commitment(), &tx_summary) - .await?; - let sig_2 = authenticators[1] - .get_signature(public_keys[1].to_commitment(), &tx_summary) - .await?; - - // Execute transaction with signatures - should succeed (first execution) - let tx_context_execute = mock_chain - .build_tx_context(multisig_account.id(), &[], &[])? 
- .add_signature(public_keys[0].to_commitment(), msg, sig_1.clone()) - .add_signature(public_keys[1].to_commitment(), msg, sig_2.clone()) - .auth_args(salt) - .build()?; - - let executed_tx = tx_context_execute.execute().await.expect("First transaction should succeed"); - - // Apply the transaction to the mock chain - mock_chain.add_pending_executed_transaction(&executed_tx)?; - mock_chain.prove_next_block()?; - - // Attempt to execute the same transaction again - should fail due to replay protection - let tx_context_replay = mock_chain - .build_tx_context(multisig_account.id(), &[], &[])? - .add_signature(public_keys[0].to_commitment(), msg, sig_1) - .add_signature(public_keys[1].to_commitment(), msg, sig_2) - .auth_args(salt) - .build()?; - - // This should fail due to replay protection - let result = tx_context_replay.execute().await; - assert_transaction_executor_error!(result, ERR_TX_ALREADY_EXECUTED); - - Ok(()) -} - /// Tests multisig signer update functionality. /// /// This test verifies that a multisig account can: @@ -451,7 +366,7 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Create a transaction script that calls the update_signers procedure let tx_script_code = " begin - call.::multisig::update_signers_and_threshold + call.::miden::standards::components::auth::multisig::update_signers_and_threshold end "; @@ -590,7 +505,7 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { vec![output_note_asset], NoteType::Public, Default::default(), - &mut RpoRandomCoin::new(Word::empty()), + &mut RandomCoin::new(Word::empty()), )?; // Create a new spawn note for the second transaction @@ -601,13 +516,13 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Build the new mock chain with the updated account and notes let mut new_mock_chain_builder = MockChainBuilder::with_accounts([updated_multisig_account.clone()]).unwrap(); - new_mock_chain_builder.add_output_note(OutputNote::Full(input_note_new.clone())); + 
new_mock_chain_builder.add_output_note(RawOutputNote::Full(input_note_new.clone())); let new_mock_chain = new_mock_chain_builder.build().unwrap(); // Execute transaction without signatures first to get tx summary let tx_context_init_new = new_mock_chain .build_tx_context(updated_multisig_account.id(), &[input_note_new.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .auth_args(salt_new) .build()?; @@ -636,7 +551,7 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Execute transaction with new signatures - should succeed let tx_context_execute_new = new_mock_chain .build_tx_context(updated_multisig_account.id(), &[input_note_new.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note_new)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note_new)]) .add_signature(new_public_keys[0].to_commitment(), msg_new, sig_1_new) .add_signature(new_public_keys[1].to_commitment(), msg_new, sig_2_new) .add_signature(new_public_keys[2].to_commitment(), msg_new, sig_3_new) @@ -712,7 +627,7 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { // Create transaction script let tx_script = CodeBuilder::default() .with_dynamically_linked_library(multisig_library())? 
- .compile_tx_script("begin\n call.::multisig::update_signers_and_threshold\nend")?; + .compile_tx_script("begin\n call.::miden::standards::components::auth::multisig::update_signers_and_threshold\nend")?; let advice_inputs = AdviceInputs { map: advice_map, ..Default::default() }; @@ -925,7 +840,7 @@ async fn test_multisig_new_approvers_cannot_sign_before_update() -> anyhow::Resu // Create a transaction script that calls the update_signers procedure let tx_script_code = " begin - call.::multisig::update_signers_and_threshold + call.::miden::standards::components::auth::multisig::update_signers_and_threshold end "; @@ -994,168 +909,3 @@ async fn test_multisig_new_approvers_cannot_sign_before_update() -> anyhow::Resu Ok(()) } - -/// Tests that 1-of-2 approvers can consume a note but 2-of-2 are required to send a note. -/// -/// This test verifies that a multisig account with 2 approvers and threshold 2, but a procedure -/// threshold of 1 for note consumption, can: -/// 1. Consume a note when only one approver signs the transaction -/// 2. Send a note only when both approvers sign the transaction (default threshold) -#[tokio::test] -async fn test_multisig_proc_threshold_overrides() -> anyhow::Result<()> { - // Setup keys and authenticators - let (_secret_keys, auth_schemes, public_keys, authenticators) = - setup_keys_and_authenticators(2, 2)?; - - let proc_threshold_map = vec![(BasicWallet::receive_asset_digest(), 1)]; - - let approvers = public_keys - .iter() - .zip(auth_schemes.iter()) - .map(|(pk, scheme)| (pk.clone(), *scheme)) - .collect::>(); - - // Create multisig account - let multisig_starting_balance = 10u64; - let mut multisig_account = - create_multisig_account(2, &approvers, multisig_starting_balance, proc_threshold_map)?; - - // SECTION 1: Test note consumption with 1 signature - // ================================================================================ - - // 1. 
create a mock note from some random account - let mut mock_chain_builder = - MockChainBuilder::with_accounts([multisig_account.clone()]).unwrap(); - - let note = mock_chain_builder.add_p2id_note( - multisig_account.id(), - multisig_account.id(), - &[FungibleAsset::mock(1)], - NoteType::Public, - )?; - - let mut mock_chain = mock_chain_builder.build()?; - - // 2. consume without signatures - let salt = Word::from([Felt::new(1); 4]); - let tx_context = mock_chain - .build_tx_context(multisig_account.id(), &[note.id()], &[])? - .auth_args(salt) - .build()?; - - let tx_summary = match tx_context.execute().await.unwrap_err() { - TransactionExecutorError::Unauthorized(tx_summary) => tx_summary, - error => panic!("expected abort with tx summary: {error:?}"), - }; - - // 3. get signature from one approver - let msg = tx_summary.as_ref().to_commitment(); - let tx_summary_signing = SigningInputs::TransactionSummary(tx_summary.clone()); - let sig = authenticators[0] - .get_signature(public_keys[0].to_commitment(), &tx_summary_signing) - .await?; - - // 4. execute with signature - let tx_result = mock_chain - .build_tx_context(multisig_account.id(), &[note.id()], &[])? - .add_signature(public_keys[0].to_commitment(), msg, sig) - .auth_args(salt) - .build()? 
- .execute() - .await; - - assert!(tx_result.is_ok(), "Note consumption with 1 signature should succeed"); - - // Apply the transaction to the account - multisig_account.apply_delta(tx_result.as_ref().unwrap().account_delta())?; - mock_chain.add_pending_executed_transaction(&tx_result.unwrap())?; - mock_chain.prove_next_block()?; - - // SECTION 2: Test note sending requires 2 signatures - // ================================================================================ - - let salt2 = Word::from([Felt::new(2); 4]); - - // Create output note to send 5 units from the account - let output_note = P2idNote::create( - multisig_account.id(), - ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE.try_into().unwrap(), - vec![FungibleAsset::mock(5)], - NoteType::Public, - Default::default(), - &mut RpoRandomCoin::new(Word::from([Felt::new(42); 4])), - )?; - let multisig_account_interface = AccountInterface::from_account(&multisig_account); - let send_note_transaction_script = - multisig_account_interface.build_send_notes_script(&[output_note.clone().into()], None)?; - - // Execute transaction without signatures to get tx summary - let tx_context_init = mock_chain - .build_tx_context(multisig_account.id(), &[], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) - .tx_script(send_note_transaction_script.clone()) - .auth_args(salt2) - .build()?; - - let tx_summary2 = match tx_context_init.execute().await.unwrap_err() { - TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, - error => panic!("expected abort with tx effects: {error:?}"), - }; - // Get signature from only ONE approver - let msg2 = tx_summary2.as_ref().to_commitment(); - let tx_summary2_signing = SigningInputs::TransactionSummary(tx_summary2.clone()); - - let sig_1 = authenticators[0] - .get_signature(public_keys[0].to_commitment(), &tx_summary2_signing) - .await?; - - // Try to execute with only 1 signature - should FAIL - let tx_context_one_sig = mock_chain - .build_tx_context(multisig_account.id(), &[], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) - .add_signature(public_keys[0].to_commitment(), msg2, sig_1) - .tx_script(send_note_transaction_script.clone()) - .auth_args(salt2) - .build()?; - - let result = tx_context_one_sig.execute().await; - match result { - Err(TransactionExecutorError::Unauthorized(_)) => { - // Expected: transaction should fail with insufficient signatures - }, - _ => panic!( - "Transaction should fail with Unauthorized error when only 1 signature provided for note sending" - ), - } - - // Now get signatures from BOTH approvers - let sig_1 = authenticators[0] - .get_signature(public_keys[0].to_commitment(), &tx_summary2_signing) - .await?; - let sig_2 = authenticators[1] - .get_signature(public_keys[1].to_commitment(), &tx_summary2_signing) - .await?; - - // Execute with 2 signatures - should SUCCEED - let result = mock_chain - .build_tx_context(multisig_account.id(), &[], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note)]) - .add_signature(public_keys[0].to_commitment(), msg2, sig_1) - .add_signature(public_keys[1].to_commitment(), msg2, sig_2) - .auth_args(salt2) - .tx_script(send_note_transaction_script) - .build()? - .execute() - .await; - - assert!(result.is_ok(), "Transaction should succeed with 2 signatures for note sending"); - - // Apply the transaction to the account - multisig_account.apply_delta(result.as_ref().unwrap().account_delta())?; - mock_chain.add_pending_executed_transaction(&result.unwrap())?; - mock_chain.prove_next_block()?; - - assert_eq!(multisig_account.vault().get_balance(FungibleAsset::mock_issuer())?, 6); - - Ok(()) -} diff --git a/crates/miden-testing/tests/auth/mod.rs b/crates/miden-testing/tests/auth/mod.rs index d7536fd219..33d6f35bde 100644 --- a/crates/miden-testing/tests/auth/mod.rs +++ b/crates/miden-testing/tests/auth/mod.rs @@ -3,3 +3,5 @@ mod singlesig_acl; mod multisig; mod hybrid_multisig; + +mod multisig_psm; diff --git a/crates/miden-testing/tests/auth/multisig.rs b/crates/miden-testing/tests/auth/multisig.rs index 48b16fa578..958687451c 100644 --- a/crates/miden-testing/tests/auth/multisig.rs +++ b/crates/miden-testing/tests/auth/multisig.rs @@ -1,5 +1,5 @@ -use miden_processor::AdviceInputs; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::advice::AdviceInputs; +use miden_processor::crypto::random::RandomCoin; use miden_protocol::account::auth::{AuthScheme, AuthSecretKey, PublicKey}; use miden_protocol::account::{ Account, @@ -14,7 +14,7 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, }; -use miden_protocol::transaction::OutputNote; +use miden_protocol::transaction::RawOutputNote; use miden_protocol::vm::AdviceMap; use miden_protocol::{Felt, Hasher, Word}; use miden_standards::account::auth::AuthMultisig; @@ -22,7 +22,10 @@ use 
miden_standards::account::components::multisig_library; use miden_standards::account::interface::{AccountInterface, AccountInterfaceExt}; use miden_standards::account::wallets::BasicWallet; use miden_standards::code_builder::CodeBuilder; -use miden_standards::errors::standards::ERR_TX_ALREADY_EXECUTED; +use miden_standards::errors::standards::{ + ERR_PROC_THRESHOLD_EXCEEDS_NUM_APPROVERS, + ERR_TX_ALREADY_EXECUTED, +}; use miden_standards::note::P2idNote; use miden_standards::testing::account_interface::get_public_keys_from_account; use miden_testing::utils::create_spawn_note; @@ -57,7 +60,9 @@ fn setup_keys_and_authenticators_with_scheme( for _ in 0..num_approvers { let sec_key = match auth_scheme { AuthScheme::EcdsaK256Keccak => AuthSecretKey::new_ecdsa_k256_keccak_with_rng(&mut rng), - AuthScheme::Falcon512Rpo => AuthSecretKey::new_falcon512_rpo_with_rng(&mut rng), + AuthScheme::Falcon512Poseidon2 => { + AuthSecretKey::new_falcon512_poseidon2_with_rng(&mut rng) + }, _ => anyhow::bail!("unsupported auth scheme for this test: {auth_scheme:?}"), }; let pub_key = sec_key.public_key(); @@ -85,7 +90,7 @@ fn create_multisig_account( ) -> anyhow::Result { let approvers = approvers .iter() - .map(|(pub_key, auth_scheme)| (pub_key.to_commitment().into(), *auth_scheme)) + .map(|(pub_key, auth_scheme)| (pub_key.to_commitment(), *auth_scheme)) .collect(); let multisig_account = AccountBuilder::new([0; 32]) @@ -114,7 +119,7 @@ fn create_multisig_account( /// - 1 Multisig Contract #[rstest] #[case::ecdsa(AuthScheme::EcdsaK256Keccak)] -#[case::falcon(AuthScheme::Falcon512Rpo)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] async fn test_multisig_2_of_2_with_note_creation( #[case] auth_scheme: AuthScheme, @@ -157,7 +162,7 @@ async fn test_multisig_2_of_2_with_note_creation( // Execute transaction without signatures - should fail let tx_context_init = mock_chain .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .auth_args(salt) .build()?; @@ -180,7 +185,7 @@ async fn test_multisig_2_of_2_with_note_creation( // Execute transaction with signatures - should succeed let tx_context_execute = mock_chain .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) .add_signature(public_keys[0].to_commitment(), msg, sig_1) .add_signature(public_keys[1].to_commitment(), msg, sig_2) .auth_args(salt) @@ -197,7 +202,7 @@ async fn test_multisig_2_of_2_with_note_creation( multisig_account .vault() .get_balance(AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET)?)?, - multisig_starting_balance - output_note_asset.unwrap_fungible().amount() + multisig_starting_balance - output_note_asset.unwrap_fungible().amount().inner() ); Ok(()) @@ -213,7 +218,7 @@ async fn test_multisig_2_of_2_with_note_creation( /// **Tested combinations:** (0,1), (0,2), (0,3), (1,2), (1,3), (2,3) #[rstest] #[case::ecdsa(AuthScheme::EcdsaK256Keccak)] -#[case::falcon(AuthScheme::Falcon512Rpo)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] async fn test_multisig_2_of_4_all_signer_combinations( #[case] auth_scheme: AuthScheme, @@ -257,7 +262,7 @@ async fn test_multisig_2_of_4_all_signer_combinations( let tx_summary = match tx_context_init.execute().await.unwrap_err() { TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, - error => panic!("expected abort with tx effects: {error:?}"), + error => anyhow::bail!("expected abort with tx effects: {error}"), }; // Get signatures from the specific combination of signers @@ -303,7 +308,7 @@ async fn test_multisig_2_of_4_all_signer_combinations( /// - 1 Multisig Contract #[rstest] #[case::ecdsa(AuthScheme::EcdsaK256Keccak)] 
-#[case::falcon(AuthScheme::Falcon512Rpo)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] async fn test_multisig_replay_protection(#[case] auth_scheme: AuthScheme) -> anyhow::Result<()> { // Setup keys and authenticators (3 approvers, but only 2 signers) @@ -354,12 +359,12 @@ async fn test_multisig_replay_protection(#[case] auth_scheme: AuthScheme) -> any .add_signature(public_keys[0].to_commitment(), msg, sig_1.clone()) .add_signature(public_keys[1].to_commitment(), msg, sig_2.clone()) .auth_args(salt) - .build()?; - - let executed_tx = tx_context_execute.execute().await.expect("First transaction should succeed"); + .build()? + .execute() + .await?; // Apply the transaction to the mock chain - mock_chain.add_pending_executed_transaction(&executed_tx)?; + mock_chain.add_pending_executed_transaction(&tx_context_execute)?; mock_chain.prove_next_block()?; // Attempt to execute the same transaction again - should fail due to replay protection @@ -391,7 +396,7 @@ async fn test_multisig_replay_protection(#[case] auth_scheme: AuthScheme) -> any /// - 1 Transaction Script calling multisig procedures #[rstest] #[case::ecdsa(AuthScheme::EcdsaK256Keccak)] -#[case::falcon(AuthScheme::Falcon512Rpo)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] async fn test_multisig_update_signers(#[case] auth_scheme: AuthScheme) -> anyhow::Result<()> { let (_secret_keys, auth_schemes, public_keys, authenticators) = @@ -464,7 +469,7 @@ async fn test_multisig_update_signers(#[case] auth_scheme: AuthScheme) -> anyhow // Create a transaction script that calls the update_signers procedure let tx_script_code = " begin - call.::multisig::update_signers_and_threshold + call.::miden::standards::components::auth::multisig::update_signers_and_threshold end "; @@ -516,8 +521,7 @@ async fn test_multisig_update_signers(#[case] auth_scheme: AuthScheme) -> anyhow .extend_advice_inputs(advice_inputs) .build()? 
.execute() - .await - .unwrap(); + .await?; // Verify the transaction executed successfully assert_eq!(update_approvers_tx.account_delta().nonce_delta(), Felt::new(1)); @@ -603,7 +607,7 @@ async fn test_multisig_update_signers(#[case] auth_scheme: AuthScheme) -> anyhow vec![output_note_asset], NoteType::Public, Default::default(), - &mut RpoRandomCoin::new(Word::empty()), + &mut RandomCoin::new(Word::empty()), )?; // Create a new spawn note for the second transaction @@ -614,13 +618,13 @@ async fn test_multisig_update_signers(#[case] auth_scheme: AuthScheme) -> anyhow // Build the new mock chain with the updated account and notes let mut new_mock_chain_builder = MockChainBuilder::with_accounts([updated_multisig_account.clone()]).unwrap(); - new_mock_chain_builder.add_output_note(OutputNote::Full(input_note_new.clone())); + new_mock_chain_builder.add_output_note(RawOutputNote::Full(input_note_new.clone())); let new_mock_chain = new_mock_chain_builder.build().unwrap(); // Execute transaction without signatures first to get tx summary let tx_context_init_new = new_mock_chain .build_tx_context(updated_multisig_account.id(), &[input_note_new.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .auth_args(salt_new) .build()?; @@ -649,7 +653,7 @@ async fn test_multisig_update_signers(#[case] auth_scheme: AuthScheme) -> anyhow // Execute transaction with new signatures - should succeed let tx_context_execute_new = new_mock_chain .build_tx_context(updated_multisig_account.id(), &[input_note_new.id()], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note_new)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note_new)]) .add_signature(new_public_keys[0].to_commitment(), msg_new, sig_1_new) .add_signature(new_public_keys[1].to_commitment(), msg_new, sig_2_new) .add_signature(new_public_keys[2].to_commitment(), msg_new, sig_3_new) @@ -678,7 +682,7 @@ async fn test_multisig_update_signers(#[case] auth_scheme: AuthScheme) -> anyhow /// - 1 Transaction Script calling multisig procedures #[rstest] #[case::ecdsa(AuthScheme::EcdsaK256Keccak)] -#[case::falcon(AuthScheme::Falcon512Rpo)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] async fn test_multisig_update_signers_remove_owner( #[case] auth_scheme: AuthScheme, @@ -729,7 +733,7 @@ async fn test_multisig_update_signers_remove_owner( // Create transaction script let tx_script = CodeBuilder::default() .with_dynamically_linked_library(multisig_library())? - .compile_tx_script("begin\n call.::multisig::update_signers_and_threshold\nend")?; + .compile_tx_script("begin\n call.::miden::standards::components::auth::multisig::update_signers_and_threshold\nend")?; let advice_inputs = AdviceInputs { map: advice_map, ..Default::default() }; @@ -779,8 +783,7 @@ async fn test_multisig_update_signers_remove_owner( .extend_advice_inputs(advice_inputs) .build()? .execute() - .await - .unwrap(); + .await?; // Verify transaction success assert_eq!(update_approvers_tx.account_delta().nonce_delta(), Felt::new(1)); @@ -864,6 +867,78 @@ async fn test_multisig_update_signers_remove_owner( Ok(()) } +/// Tests that signer updates are rejected when stored procedure threshold overrides would become +/// unreachable for the new signer set. 
+#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] +#[tokio::test] +async fn test_multisig_update_signers_rejects_unreachable_proc_thresholds( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { + let (_secret_keys, auth_schemes, public_keys, _authenticators) = + setup_keys_and_authenticators_with_scheme(3, 2, auth_scheme)?; + + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); + + // Configure a procedure override that is valid for the initial signer set (3-of-3), + // but invalid after updating to 2 signers. + let multisig_account = + create_multisig_account(2, &approvers, 10, vec![(BasicWallet::receive_asset_digest(), 3)])?; + + let mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) + .unwrap() + .build() + .unwrap(); + + let new_public_keys = &public_keys[0..2]; + let threshold = 2u64; + let num_of_approvers = 2u64; + + let mut config_and_pubkeys_vector = + vec![Felt::new(threshold), Felt::new(num_of_approvers), Felt::new(0), Felt::new(0)]; + + for public_key in new_public_keys.iter().rev() { + let key_word: Word = public_key.to_commitment().into(); + config_and_pubkeys_vector.extend_from_slice(key_word.as_elements()); + config_and_pubkeys_vector.extend_from_slice(&[ + Felt::new(auth_scheme as u64), + Felt::new(0), + Felt::new(0), + Felt::new(0), + ]); + } + + let multisig_config_hash = Hasher::hash_elements(&config_and_pubkeys_vector); + let mut advice_map = AdviceMap::default(); + advice_map.insert(multisig_config_hash, config_and_pubkeys_vector); + + let tx_script = CodeBuilder::default() + .with_dynamically_linked_library(multisig_library())? 
+ .compile_tx_script("begin\n call.::miden::standards::components::auth::multisig::update_signers_and_threshold\nend")?; + + let advice_inputs = AdviceInputs { map: advice_map, ..Default::default() }; + let salt = Word::from([Felt::new(8); 4]); + + let result = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(tx_script) + .tx_script_args(multisig_config_hash) + .extend_advice_inputs(advice_inputs) + .auth_args(salt) + .build()? + .execute() + .await; + + assert_transaction_executor_error!(result, ERR_PROC_THRESHOLD_EXCEEDS_NUM_APPROVERS); + + Ok(()) +} + /// Tests that newly added approvers cannot sign transactions before the signer update is executed. /// /// This is a regression test to ensure that unauthorized parties cannot add their own public keys @@ -877,7 +952,7 @@ async fn test_multisig_update_signers_remove_owner( /// 4. Verify that only the CURRENT approvers can sign the update transaction #[rstest] #[case::ecdsa(AuthScheme::EcdsaK256Keccak)] -#[case::falcon(AuthScheme::Falcon512Rpo)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] async fn test_multisig_new_approvers_cannot_sign_before_update( #[case] auth_scheme: AuthScheme, @@ -947,7 +1022,7 @@ async fn test_multisig_new_approvers_cannot_sign_before_update( // Create a transaction script that calls the update_signers procedure let tx_script_code = " begin - call.::multisig::update_signers_and_threshold + call.::miden::standards::components::auth::multisig::update_signers_and_threshold end "; @@ -1025,7 +1100,7 @@ async fn test_multisig_new_approvers_cannot_sign_before_update( /// 2. 
Send a note only when both approvers sign the transaction (default threshold) #[rstest] #[case::ecdsa(AuthScheme::EcdsaK256Keccak)] -#[case::falcon(AuthScheme::Falcon512Rpo)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] async fn test_multisig_proc_threshold_overrides( #[case] auth_scheme: AuthScheme, @@ -1110,7 +1185,7 @@ async fn test_multisig_proc_threshold_overrides( vec![FungibleAsset::mock(5)], NoteType::Public, Default::default(), - &mut RpoRandomCoin::new(Word::from([Felt::new(42); 4])), + &mut RandomCoin::new(Word::from([Felt::new(42); 4])), )?; let multisig_account_interface = AccountInterface::from_account(&multisig_account); let send_note_transaction_script = @@ -1119,7 +1194,7 @@ async fn test_multisig_proc_threshold_overrides( // Execute transaction without signatures to get tx summary let tx_context_init = mock_chain .build_tx_context(multisig_account.id(), &[], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .tx_script(send_note_transaction_script.clone()) .auth_args(salt2) .build()?; @@ -1139,7 +1214,7 @@ async fn test_multisig_proc_threshold_overrides( // Try to execute with only 1 signature - should FAIL let tx_context_one_sig = mock_chain .build_tx_context(multisig_account.id(), &[], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .add_signature(public_keys[0].to_commitment(), msg2, sig_1) .tx_script(send_note_transaction_script.clone()) .auth_args(salt2) @@ -1166,7 +1241,7 @@ async fn test_multisig_proc_threshold_overrides( // Execute with 2 signatures - should SUCCEED let result = mock_chain .build_tx_context(multisig_account.id(), &[], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) .add_signature(public_keys[0].to_commitment(), msg2, sig_1) .add_signature(public_keys[1].to_commitment(), msg2, sig_2) .auth_args(salt2) @@ -1186,3 +1261,251 @@ async fn test_multisig_proc_threshold_overrides( Ok(()) } + +/// Tests setting a per-procedure threshold override and clearing it via `proc_threshold == 0`. +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] +#[tokio::test] +async fn test_multisig_set_procedure_threshold( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?; + + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); + + let mut multisig_account = create_multisig_account(2, &approvers, 10, vec![])?; + let mut mock_chain_builder = + MockChainBuilder::with_accounts([multisig_account.clone()]).unwrap(); + let one_sig_note = mock_chain_builder.add_p2id_note( + multisig_account.id(), + multisig_account.id(), + &[FungibleAsset::mock(1)], + NoteType::Public, + )?; + let clear_check_note = mock_chain_builder.add_p2id_note( + multisig_account.id(), + multisig_account.id(), + &[FungibleAsset::mock(1)], + NoteType::Public, + )?; + let mut mock_chain = mock_chain_builder.build().unwrap(); + let proc_root = BasicWallet::receive_asset_digest(); + + let set_script_code = format!( + r#" + begin + push.{proc_root} + push.1 + call.::miden::standards::components::auth::multisig::set_procedure_threshold + dropw + drop + end + "# + ); + let set_script = CodeBuilder::default() + .with_dynamically_linked_library(multisig_library())? + .compile_tx_script(set_script_code)?; + + // 1) Set override to 1 (requires default 2 signatures). 
+ let set_salt = Word::from([Felt::new(50); 4]); + + let set_init = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(set_script.clone()) + .auth_args(set_salt) + .build()?; + let set_summary = match set_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => panic!("expected abort with tx effects: {error:?}"), + }; + let set_msg = set_summary.as_ref().to_commitment(); + let set_summary = SigningInputs::TransactionSummary(set_summary); + let set_sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &set_summary) + .await?; + let set_sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &set_summary) + .await?; + + let set_tx = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(set_script) + .add_signature(public_keys[0].to_commitment(), set_msg, set_sig_1) + .add_signature(public_keys[1].to_commitment(), set_msg, set_sig_2) + .auth_args(set_salt) + .build()? + .execute() + .await?; + + multisig_account.apply_delta(set_tx.account_delta())?; + mock_chain.add_pending_executed_transaction(&set_tx)?; + mock_chain.prove_next_block()?; + + // 2) Verify receive_asset can now execute with one signature. + let one_sig_salt = Word::from([Felt::new(51); 4]); + + let one_sig_init = mock_chain + .build_tx_context(multisig_account.id(), &[one_sig_note.id()], &[])? 
+ .auth_args(one_sig_salt) + .build()?; + let one_sig_summary = match one_sig_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => panic!("expected abort with tx effects: {error:?}"), + }; + let one_sig_msg = one_sig_summary.as_ref().to_commitment(); + let one_sig_summary = SigningInputs::TransactionSummary(one_sig_summary); + let one_sig = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &one_sig_summary) + .await?; + + let one_sig_tx = mock_chain + .build_tx_context(multisig_account.id(), &[one_sig_note.id()], &[])? + .add_signature(public_keys[0].to_commitment(), one_sig_msg, one_sig) + .auth_args(one_sig_salt) + .build()? + .execute() + .await + .expect("override=1 should allow receive_asset with one signature"); + multisig_account.apply_delta(one_sig_tx.account_delta())?; + mock_chain.add_pending_executed_transaction(&one_sig_tx)?; + mock_chain.prove_next_block()?; + + // 3) Clear override by setting threshold to zero. + let clear_script_code = format!( + r#" + begin + push.{proc_root} + push.0 + call.::miden::standards::components::auth::multisig::set_procedure_threshold + dropw + drop + end + "# + ); + let clear_script = CodeBuilder::default() + .with_dynamically_linked_library(multisig_library())? + .compile_tx_script(clear_script_code)?; + let clear_salt = Word::from([Felt::new(52); 4]); + + let clear_init = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? 
+ .tx_script(clear_script.clone()) + .auth_args(clear_salt) + .build()?; + let clear_summary = match clear_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => panic!("expected abort with tx effects: {error:?}"), + }; + let clear_msg = clear_summary.as_ref().to_commitment(); + let clear_summary = SigningInputs::TransactionSummary(clear_summary); + let clear_sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &clear_summary) + .await?; + let clear_sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &clear_summary) + .await?; + + let clear_tx = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(clear_script) + .add_signature(public_keys[0].to_commitment(), clear_msg, clear_sig_1) + .add_signature(public_keys[1].to_commitment(), clear_msg, clear_sig_2) + .auth_args(clear_salt) + .build()? + .execute() + .await?; + + multisig_account.apply_delta(clear_tx.account_delta())?; + mock_chain.add_pending_executed_transaction(&clear_tx)?; + mock_chain.prove_next_block()?; + + // 4) After clear, one signature should no longer be sufficient for receive_asset. + let clear_check_salt = Word::from([Felt::new(53); 4]); + + let clear_check_init = mock_chain + .build_tx_context(multisig_account.id(), &[clear_check_note.id()], &[])? 
+ .auth_args(clear_check_salt) + .build()?; + let clear_check_summary = match clear_check_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => panic!("expected abort with tx effects: {error:?}"), + }; + let clear_check_msg = clear_check_summary.as_ref().to_commitment(); + let clear_check_summary = SigningInputs::TransactionSummary(clear_check_summary); + let clear_check_sig = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &clear_check_summary) + .await?; + + let clear_check_result = mock_chain + .build_tx_context(multisig_account.id(), &[clear_check_note.id()], &[])? + .add_signature(public_keys[0].to_commitment(), clear_check_msg, clear_check_sig) + .auth_args(clear_check_salt) + .build()? + .execute() + .await; + + assert!( + matches!(clear_check_result, Err(TransactionExecutorError::Unauthorized(_))), + "override cleared via threshold=0 should restore default threshold requirements" + ); + + Ok(()) +} + +/// Tests setting an override threshold above num_approvers is rejected. +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] +#[tokio::test] +async fn test_multisig_set_procedure_threshold_rejects_exceeding_approvers( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { + let (_secret_keys, auth_schemes, public_keys, _authenticators) = + setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?; + + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); + + let multisig_account = create_multisig_account(2, &approvers, 10, vec![])?; + let proc_root = BasicWallet::receive_asset_digest(); + + let script_code = format!( + r#" + begin + push.{proc_root} + push.3 + call.::miden::standards::components::auth::multisig::set_procedure_threshold + end + "# + ); + let script = CodeBuilder::default() + .with_dynamically_linked_library(multisig_library())? 
+ .compile_tx_script(script_code)?; + + let mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) + .unwrap() + .build() + .unwrap(); + let salt = Word::from([Felt::new(54); 4]); + + let tx_context_init = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(script.clone()) + .auth_args(salt) + .build()?; + + let result = tx_context_init.execute().await; + + assert_transaction_executor_error!(result, ERR_PROC_THRESHOLD_EXCEEDS_NUM_APPROVERS); + + Ok(()) +} diff --git a/crates/miden-testing/tests/auth/multisig_psm.rs b/crates/miden-testing/tests/auth/multisig_psm.rs new file mode 100644 index 0000000000..a30eac8dac --- /dev/null +++ b/crates/miden-testing/tests/auth/multisig_psm.rs @@ -0,0 +1,531 @@ +use miden_protocol::account::auth::{AuthScheme, AuthSecretKey, PublicKey}; +use miden_protocol::account::{ + Account, + AccountBuilder, + AccountId, + AccountStorageMode, + AccountType, +}; +use miden_protocol::asset::FungibleAsset; +use miden_protocol::note::{Note, NoteAssets, NoteMetadata, NoteRecipient, NoteStorage, NoteType}; +use miden_protocol::testing::account_id::{ + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, + ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, +}; +use miden_protocol::transaction::RawOutputNote; +use miden_protocol::{Felt, Word}; +use miden_standards::account::auth::{AuthMultisigPsm, AuthMultisigPsmConfig, PsmConfig}; +use miden_standards::account::components::multisig_psm_library; +use miden_standards::account::wallets::BasicWallet; +use miden_standards::code_builder::CodeBuilder; +use miden_standards::errors::standards::{ + ERR_AUTH_PROCEDURE_MUST_BE_CALLED_ALONE, + ERR_AUTH_TRANSACTION_MUST_NOT_INCLUDE_INPUT_OR_OUTPUT_NOTES, +}; +use miden_testing::{MockChainBuilder, assert_transaction_executor_error}; +use miden_tx::TransactionExecutorError; +use miden_tx::auth::{BasicAuthenticator, SigningInputs, TransactionAuthenticator}; +use rand::SeedableRng; +use rand_chacha::ChaCha20Rng; +use rstest::rstest; + +// 
================================================================================================ +// HELPER FUNCTIONS +// ================================================================================================ + +type MultisigTestSetup = + (Vec, Vec, Vec, Vec); + +/// Sets up secret keys, public keys, and authenticators for multisig testing for the given scheme. +fn setup_keys_and_authenticators_with_scheme( + num_approvers: usize, + threshold: usize, + auth_scheme: AuthScheme, +) -> anyhow::Result { + let seed: [u8; 32] = rand::random(); + let mut rng = ChaCha20Rng::from_seed(seed); + + let mut secret_keys = Vec::new(); + let mut auth_schemes = Vec::new(); + let mut public_keys = Vec::new(); + let mut authenticators = Vec::new(); + + for _ in 0..num_approvers { + let sec_key = match auth_scheme { + AuthScheme::EcdsaK256Keccak => AuthSecretKey::new_ecdsa_k256_keccak_with_rng(&mut rng), + AuthScheme::Falcon512Poseidon2 => { + AuthSecretKey::new_falcon512_poseidon2_with_rng(&mut rng) + }, + _ => anyhow::bail!("unsupported auth scheme for this test: {auth_scheme:?}"), + }; + let pub_key = sec_key.public_key(); + + secret_keys.push(sec_key); + auth_schemes.push(auth_scheme); + public_keys.push(pub_key); + } + + // Create authenticators for required signers + for secret_key in secret_keys.iter().take(threshold) { + let authenticator = BasicAuthenticator::new(core::slice::from_ref(secret_key)); + authenticators.push(authenticator); + } + + Ok((secret_keys, auth_schemes, public_keys, authenticators)) +} + +/// Creates a multisig account configured with a private state manager signer. 
+fn create_multisig_account_with_psm( + threshold: u32, + approvers: &[(PublicKey, AuthScheme)], + psm: PsmConfig, + asset_amount: u64, + proc_threshold_map: Vec<(Word, u32)>, +) -> anyhow::Result { + let approvers = approvers + .iter() + .map(|(pub_key, auth_scheme)| (pub_key.to_commitment(), *auth_scheme)) + .collect(); + + let config = AuthMultisigPsmConfig::new(approvers, threshold, psm)? + .with_proc_thresholds(proc_threshold_map)?; + + let multisig_account = AccountBuilder::new([0; 32]) + .with_auth_component(AuthMultisigPsm::new(config)?) + .with_component(BasicWallet) + .account_type(AccountType::RegularAccountUpdatableCode) + .storage_mode(AccountStorageMode::Public) + .with_assets(vec![FungibleAsset::mock(asset_amount)]) + .build_existing()?; + + Ok(multisig_account) +} + +// ================================================================================================ +// TESTS +// ================================================================================================ + +/// Tests that multisig authentication requires an additional PSM signature when +/// configured. 
+#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] +#[tokio::test] +async fn test_multisig_psm_signature_required( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?; + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); + + let psm_secret_key = AuthSecretKey::new_ecdsa_k256_keccak(); + let psm_public_key = psm_secret_key.public_key(); + let psm_authenticator = BasicAuthenticator::new(core::slice::from_ref(&psm_secret_key)); + + let mut multisig_account = create_multisig_account_with_psm( + 2, + &approvers, + PsmConfig::new(psm_public_key.to_commitment(), AuthScheme::EcdsaK256Keccak), + 10, + vec![], + )?; + + let output_note_asset = FungibleAsset::mock(0); + let mut mock_chain_builder = + MockChainBuilder::with_accounts([multisig_account.clone()]).unwrap(); + + let output_note = mock_chain_builder.add_p2id_note( + multisig_account.id(), + ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE.try_into().unwrap(), + &[output_note_asset], + NoteType::Public, + )?; + let input_note = mock_chain_builder.add_spawn_note([&output_note])?; + let mut mock_chain = mock_chain_builder.build().unwrap(); + + let salt = Word::from([Felt::new(777); 4]); + let tx_context_init = mock_chain + .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? 
+ .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) + .auth_args(salt) + .build()?; + + let tx_summary = match tx_context_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => anyhow::bail!("expected abort with tx effects: {error}"), + }; + let msg = tx_summary.as_ref().to_commitment(); + let tx_summary_signing = SigningInputs::TransactionSummary(tx_summary); + + let sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &tx_summary_signing) + .await?; + let sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &tx_summary_signing) + .await?; + + // Missing PSM signature must fail. + let without_psm_result = mock_chain + .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) + .add_signature(public_keys[0].to_commitment(), msg, sig_1.clone()) + .add_signature(public_keys[1].to_commitment(), msg, sig_2.clone()) + .auth_args(salt) + .build()? + .execute() + .await; + assert!(matches!(without_psm_result, Err(TransactionExecutorError::Unauthorized(_)))); + + let psm_signature = psm_authenticator + .get_signature(psm_public_key.to_commitment(), &tx_summary_signing) + .await?; + + // With PSM signature the transaction should succeed. + let tx_context_execute = mock_chain + .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) + .add_signature(public_keys[0].to_commitment(), msg, sig_1) + .add_signature(public_keys[1].to_commitment(), msg, sig_2) + .add_signature(psm_public_key.to_commitment(), msg, psm_signature) + .auth_args(salt) + .build()? 
+ .execute() + .await?; + + multisig_account.apply_delta(tx_context_execute.account_delta())?; + + mock_chain.add_pending_executed_transaction(&tx_context_execute)?; + mock_chain.prove_next_block()?; + + assert_eq!( + multisig_account + .vault() + .get_balance(AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET)?)?, + 10 - output_note_asset.unwrap_fungible().amount().inner() + ); + + Ok(()) +} + +/// Tests that the PSM public key can be updated and then enforced. +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] +#[tokio::test] +async fn test_multisig_update_psm_public_key( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?; + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); + + let old_psm_secret_key = AuthSecretKey::new_ecdsa_k256_keccak(); + let old_psm_public_key = old_psm_secret_key.public_key(); + let old_psm_authenticator = BasicAuthenticator::new(core::slice::from_ref(&old_psm_secret_key)); + + let new_psm_secret_key = AuthSecretKey::new_falcon512_poseidon2(); + let new_psm_public_key = new_psm_secret_key.public_key(); + let new_psm_auth_scheme = new_psm_secret_key.auth_scheme(); + let new_psm_authenticator = BasicAuthenticator::new(core::slice::from_ref(&new_psm_secret_key)); + + let multisig_account = create_multisig_account_with_psm( + 2, + &approvers, + PsmConfig::new(old_psm_public_key.to_commitment(), AuthScheme::EcdsaK256Keccak), + 10, + vec![], + )?; + + let mut mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) + .unwrap() + .build() + .unwrap(); + + let new_psm_key_word: Word = new_psm_public_key.to_commitment().into(); + let new_psm_scheme_id = new_psm_auth_scheme as u32; + let update_psm_script = CodeBuilder::new() + 
.with_dynamically_linked_library(multisig_psm_library())? + .compile_tx_script(format!( + "begin\n push.{new_psm_key_word}\n push.{new_psm_scheme_id}\n call.::miden::standards::components::auth::multisig_psm::update_psm_public_key\n drop\n dropw\nend" + ))?; + + let update_salt = Word::from([Felt::new(991); 4]); + let tx_context_init = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(update_psm_script.clone()) + .auth_args(update_salt) + .build()?; + + let tx_summary = match tx_context_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => anyhow::bail!("expected abort with tx effects: {error}"), + }; + + let update_msg = tx_summary.as_ref().to_commitment(); + let tx_summary_signing = SigningInputs::TransactionSummary(tx_summary); + let sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &tx_summary_signing) + .await?; + let sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &tx_summary_signing) + .await?; + + // PSM key rotation intentionally skips PSM signature for this update tx. + let update_psm_tx = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(update_psm_script) + .add_signature(public_keys[0].to_commitment(), update_msg, sig_1) + .add_signature(public_keys[1].to_commitment(), update_msg, sig_2) + .auth_args(update_salt) + .build()? 
+ .execute() + .await?; + + let mut updated_multisig_account = multisig_account.clone(); + updated_multisig_account.apply_delta(update_psm_tx.account_delta())?; + let updated_psm_public_key = updated_multisig_account + .storage() + .get_map_item(AuthMultisigPsm::psm_public_key_slot(), Word::empty())?; + assert_eq!(updated_psm_public_key, Word::from(new_psm_public_key.to_commitment())); + let updated_psm_scheme_id = updated_multisig_account + .storage() + .get_map_item(AuthMultisigPsm::psm_scheme_id_slot(), Word::from([0u32, 0, 0, 0]))?; + assert_eq!( + updated_psm_scheme_id, + Word::from([new_psm_auth_scheme as u32, 0u32, 0u32, 0u32]) + ); + + mock_chain.add_pending_executed_transaction(&update_psm_tx)?; + mock_chain.prove_next_block()?; + + // Build one tx summary after key update. Old PSM must fail and new PSM must pass on this same + // transaction. + let next_salt = Word::from([Felt::new(992); 4]); + let tx_context_init_next = mock_chain + .build_tx_context(updated_multisig_account.id(), &[], &[])? + .auth_args(next_salt) + .build()?; + + let tx_summary_next = match tx_context_init_next.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => anyhow::bail!("expected abort with tx effects: {error}"), + }; + let next_msg = tx_summary_next.as_ref().to_commitment(); + let tx_summary_next_signing = SigningInputs::TransactionSummary(tx_summary_next); + + let next_sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &tx_summary_next_signing) + .await?; + let next_sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &tx_summary_next_signing) + .await?; + let old_psm_sig_next = old_psm_authenticator + .get_signature(old_psm_public_key.to_commitment(), &tx_summary_next_signing) + .await?; + let new_psm_sig_next = new_psm_authenticator + .get_signature(new_psm_public_key.to_commitment(), &tx_summary_next_signing) + .await?; + + // Old PSM signature must fail after key update. 
+ let with_old_psm_result = mock_chain + .build_tx_context(updated_multisig_account.id(), &[], &[])? + .add_signature(public_keys[0].to_commitment(), next_msg, next_sig_1.clone()) + .add_signature(public_keys[1].to_commitment(), next_msg, next_sig_2.clone()) + .add_signature(old_psm_public_key.to_commitment(), next_msg, old_psm_sig_next) + .auth_args(next_salt) + .build()? + .execute() + .await; + assert!(matches!(with_old_psm_result, Err(TransactionExecutorError::Unauthorized(_)))); + + // New PSM signature must pass. + mock_chain + .build_tx_context(updated_multisig_account.id(), &[], &[])? + .add_signature(public_keys[0].to_commitment(), next_msg, next_sig_1) + .add_signature(public_keys[1].to_commitment(), next_msg, next_sig_2) + .add_signature(new_psm_public_key.to_commitment(), next_msg, new_psm_sig_next) + .auth_args(next_salt) + .build()? + .execute() + .await?; + + Ok(()) +} + +/// Tests that `update_psm_public_key` must be the only account action in the transaction. +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] +#[tokio::test] +async fn test_multisig_update_psm_public_key_must_be_called_alone( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?; + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); + + let old_psm_secret_key = AuthSecretKey::new_ecdsa_k256_keccak(); + let old_psm_public_key = old_psm_secret_key.public_key(); + let old_psm_authenticator = BasicAuthenticator::new(core::slice::from_ref(&old_psm_secret_key)); + + let new_psm_secret_key = AuthSecretKey::new_falcon512_poseidon2(); + let new_psm_public_key = new_psm_secret_key.public_key(); + let new_psm_auth_scheme = new_psm_secret_key.auth_scheme(); + + let multisig_account = create_multisig_account_with_psm( + 2, + 
&approvers, + PsmConfig::new(old_psm_public_key.to_commitment(), AuthScheme::EcdsaK256Keccak), + 10, + vec![], + )?; + + let new_psm_key_word: Word = new_psm_public_key.to_commitment().into(); + let new_psm_scheme_id = new_psm_auth_scheme as u32; + let update_psm_script = CodeBuilder::new() + .with_dynamically_linked_library(multisig_psm_library())? + .compile_tx_script(format!( + "begin\n push.{new_psm_key_word}\n push.{new_psm_scheme_id}\n call.::miden::standards::components::auth::multisig_psm::update_psm_public_key\n drop\n dropw\nend" + ))?; + + let mut mock_chain_builder = + MockChainBuilder::with_accounts([multisig_account.clone()]).unwrap(); + let receive_asset_note = mock_chain_builder.add_p2id_note( + multisig_account.id(), + multisig_account.id(), + &[FungibleAsset::mock(1)], + NoteType::Public, + )?; + let mock_chain = mock_chain_builder.build().unwrap(); + + let salt = Word::from([Felt::new(993); 4]); + let tx_context_init = mock_chain + .build_tx_context(multisig_account.id(), &[receive_asset_note.id()], &[])? + .tx_script(update_psm_script.clone()) + .auth_args(salt) + .build()?; + + let tx_summary = match tx_context_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => anyhow::bail!("expected abort with tx effects: {error}"), + }; + + let msg = tx_summary.as_ref().to_commitment(); + let tx_summary_signing = SigningInputs::TransactionSummary(tx_summary); + let sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &tx_summary_signing) + .await?; + let sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &tx_summary_signing) + .await?; + + let without_psm_result = mock_chain + .build_tx_context(multisig_account.id(), &[receive_asset_note.id()], &[])? 
+ .tx_script(update_psm_script.clone()) + .add_signature(public_keys[0].to_commitment(), msg, sig_1.clone()) + .add_signature(public_keys[1].to_commitment(), msg, sig_2.clone()) + .auth_args(salt) + .build()? + .execute() + .await; + assert_transaction_executor_error!(without_psm_result, ERR_AUTH_PROCEDURE_MUST_BE_CALLED_ALONE); + + let old_psm_signature = old_psm_authenticator + .get_signature(old_psm_public_key.to_commitment(), &tx_summary_signing) + .await?; + + let with_psm_result = mock_chain + .build_tx_context(multisig_account.id(), &[receive_asset_note.id()], &[])? + .tx_script(update_psm_script) + .add_signature(public_keys[0].to_commitment(), msg, sig_1) + .add_signature(public_keys[1].to_commitment(), msg, sig_2) + .add_signature(old_psm_public_key.to_commitment(), msg, old_psm_signature) + .auth_args(salt) + .build()? + .execute() + .await; + + assert_transaction_executor_error!(with_psm_result, ERR_AUTH_PROCEDURE_MUST_BE_CALLED_ALONE); + + // Also reject rotation transactions that touch notes even when no other account procedure is + // called. + let note_script = CodeBuilder::default().compile_note_script("begin nop end")?; + let note_serial_num = Word::from([Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)]); + let note_recipient = + NoteRecipient::new(note_serial_num, note_script.clone(), NoteStorage::default()); + let output_note = Note::new( + NoteAssets::new(vec![])?, + NoteMetadata::new(multisig_account.id(), NoteType::Public), + note_recipient, + ); + + let new_psm_key_word: Word = new_psm_public_key.to_commitment().into(); + let new_psm_scheme_id = new_psm_auth_scheme as u32; + let update_psm_with_output_script = CodeBuilder::new() + .with_dynamically_linked_library(multisig_psm_library())? 
+ .compile_tx_script(format!( + "use miden::protocol::output_note\nbegin\n push.{recipient}\n push.{note_type}\n push.{tag}\n exec.output_note::create\n swapdw\n dropw\n dropw\n push.{new_psm_key_word}\n push.{new_psm_scheme_id}\n call.::miden::standards::components::auth::multisig_psm::update_psm_public_key\n drop\n dropw\nend", + recipient = output_note.recipient().digest(), + note_type = NoteType::Public as u8, + tag = Felt::from(output_note.metadata().tag()), + ))?; + + let mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) + .unwrap() + .build() + .unwrap(); + + let salt = Word::from([Felt::new(994); 4]); + let tx_context_init = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(update_psm_with_output_script.clone()) + .add_note_script(note_script.clone()) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) + .auth_args(salt) + .build()?; + + let tx_summary = match tx_context_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => anyhow::bail!("expected abort with tx effects: {error}"), + }; + + let msg = tx_summary.as_ref().to_commitment(); + let tx_summary_signing = SigningInputs::TransactionSummary(tx_summary); + let sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &tx_summary_signing) + .await?; + let sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &tx_summary_signing) + .await?; + + let result = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(update_psm_with_output_script) + .add_note_script(note_script) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) + .add_signature(public_keys[0].to_commitment(), msg, sig_1) + .add_signature(public_keys[1].to_commitment(), msg, sig_2) + .auth_args(salt) + .build()? 
+ .execute() + .await; + + assert_transaction_executor_error!( + result, + ERR_AUTH_TRANSACTION_MUST_NOT_INCLUDE_INPUT_OR_OUTPUT_NOTES + ); + + Ok(()) +} diff --git a/crates/miden-testing/tests/auth/singlesig_acl.rs b/crates/miden-testing/tests/auth/singlesig_acl.rs index 27260317d8..04d97cd3d2 100644 --- a/crates/miden-testing/tests/auth/singlesig_acl.rs +++ b/crates/miden-testing/tests/auth/singlesig_acl.rs @@ -12,8 +12,8 @@ use miden_protocol::account::{ }; use miden_protocol::note::Note; use miden_protocol::testing::storage::MOCK_VALUE_SLOT0; -use miden_protocol::transaction::OutputNote; -use miden_protocol::{Felt, FieldElement, Word}; +use miden_protocol::transaction::RawOutputNote; +use miden_protocol::{Felt, Word}; use miden_standards::account::auth::AuthSingleSigAcl; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::account_component::MockAccountComponent; @@ -77,7 +77,7 @@ fn setup_acl_test( let note = NoteBuilder::new(account.id(), &mut rand::rng()) .build() .expect("failed to create mock note"); - builder.add_output_note(OutputNote::Full(note.clone())); + builder.add_output_note(RawOutputNote::Full(note.clone())); let mock_chain = builder.build()?; Ok((account, mock_chain, note)) @@ -85,7 +85,7 @@ fn setup_acl_test( #[rstest] #[case::ecdsa(AuthScheme::EcdsaK256Keccak)] -#[case::falcon(AuthScheme::Falcon512Rpo)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] async fn test_acl(#[case] auth_scheme: AuthScheme) -> anyhow::Result<()> { let (account, mock_chain, note) = setup_acl_test(false, true, auth_scheme)?; @@ -161,7 +161,7 @@ async fn test_acl(#[case] auth_scheme: AuthScheme) -> anyhow::Result<()> { .execute() .await .expect("trigger 1 with auth should succeed"); - prove_and_verify_transaction(executed_tx_with_auth_1)?; + prove_and_verify_transaction(executed_tx_with_auth_1).await?; // Test 2: Transaction WITH authenticator calling trigger procedure 2 (should succeed) let tx_context_with_auth_2 = mock_chain @@ 
-208,7 +208,7 @@ async fn test_acl(#[case] auth_scheme: AuthScheme) -> anyhow::Result<()> { #[rstest] #[case::ecdsa(AuthScheme::EcdsaK256Keccak)] -#[case::falcon(AuthScheme::Falcon512Rpo)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] async fn test_acl_with_allow_unauthorized_output_notes( #[case] auth_scheme: AuthScheme, @@ -253,7 +253,7 @@ async fn test_acl_with_allow_unauthorized_output_notes( #[rstest] #[case::ecdsa(AuthScheme::EcdsaK256Keccak)] -#[case::falcon(AuthScheme::Falcon512Rpo)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] async fn test_acl_with_disallow_unauthorized_input_notes( #[case] auth_scheme: AuthScheme, diff --git a/crates/miden-testing/tests/lib.rs b/crates/miden-testing/tests/lib.rs index ef884c34a8..b27b9a00d0 100644 --- a/crates/miden-testing/tests/lib.rs +++ b/crates/miden-testing/tests/lib.rs @@ -5,7 +5,6 @@ mod auth; mod scripts; mod wallet; -use miden_processor::utils::Deserializable; use miden_protocol::Word; use miden_protocol::account::AccountId; use miden_protocol::asset::FungibleAsset; @@ -13,6 +12,7 @@ use miden_protocol::crypto::utils::Serializable; use miden_protocol::note::{Note, NoteAssets, NoteMetadata, NoteRecipient, NoteStorage, NoteType}; use miden_protocol::testing::account_id::ACCOUNT_ID_SENDER; use miden_protocol::transaction::{ExecutedTransaction, ProvenTransaction}; +use miden_protocol::utils::serde::Deserializable; use miden_standards::code_builder::CodeBuilder; use miden_tx::{ LocalTransactionProver, @@ -25,7 +25,7 @@ use miden_tx::{ // ================================================================================================ #[cfg(test)] -pub fn prove_and_verify_transaction( +pub async fn prove_and_verify_transaction( executed_transaction: ExecutedTransaction, ) -> Result<(), TransactionVerifierError> { use miden_protocol::transaction::TransactionHeader; @@ -36,7 +36,7 @@ pub fn prove_and_verify_transaction( let proof_options = ProvingOptions::default(); let prover = 
LocalTransactionProver::new(proof_options); - let proven_transaction = prover.prove(executed_transaction).unwrap(); + let proven_transaction = prover.prove(executed_transaction).await.unwrap(); let proven_tx_header = TransactionHeader::from(&proven_transaction); assert_eq!(proven_transaction.id(), executed_transaction_id); diff --git a/crates/miden-testing/tests/scripts/faucet.rs b/crates/miden-testing/tests/scripts/faucet.rs index 15d4b6c62b..06e417c937 100644 --- a/crates/miden-testing/tests/scripts/faucet.rs +++ b/crates/miden-testing/tests/scripts/faucet.rs @@ -3,7 +3,7 @@ extern crate alloc; use alloc::sync::Arc; use core::slice; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::crypto::random::RandomCoin; use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{ Account, @@ -26,24 +26,27 @@ use miden_protocol::note::{ NoteType, }; use miden_protocol::testing::account_id::ACCOUNT_ID_PRIVATE_SENDER; -use miden_protocol::transaction::{ExecutedTransaction, OutputNote}; +use miden_protocol::transaction::{ExecutedTransaction, RawOutputNote}; use miden_protocol::{Felt, Word}; +use miden_standards::account::access::Ownable2Step; use miden_standards::account::faucets::{ BasicFungibleFaucet, NetworkFungibleFaucet, TokenMetadata, }; +use miden_standards::account::mint_policies::OwnerControlledInitConfig; use miden_standards::code_builder::CodeBuilder; use miden_standards::errors::standards::{ ERR_FAUCET_BURN_AMOUNT_EXCEEDS_TOKEN_SUPPLY, ERR_FUNGIBLE_ASSET_DISTRIBUTE_AMOUNT_EXCEEDS_MAX_SUPPLY, + ERR_MINT_POLICY_ROOT_NOT_ALLOWED, ERR_SENDER_NOT_OWNER, }; use miden_standards::note::{BurnNote, MintNote, MintNoteStorage, StandardNote}; use miden_standards::testing::note::NoteBuilder; +use miden_testing::utils::create_p2id_note_exact; use miden_testing::{Auth, MockChain, assert_transaction_executor_error}; -use crate::scripts::swap::create_p2id_note_exact; use crate::{get_note_with_fungible_asset_and_script, prove_and_verify_transaction}; 
// Shared test utilities for faucet tests @@ -71,7 +74,7 @@ pub fn create_mint_script_code(params: &FaucetTestParams) -> String { push.{amount} # => [amount, tag, note_type, RECIPIENT, pad(9)] - call.::miden::standards::faucets::basic_fungible::distribute + call.::miden::standards::faucets::basic_fungible::mint_and_send # => [note_idx, pad(15)] # truncate the stack @@ -110,7 +113,8 @@ pub fn verify_minted_output_note( faucet: &Account, params: &FaucetTestParams, ) -> anyhow::Result<()> { - let fungible_asset: Asset = FungibleAsset::new(faucet.id(), params.amount.into())?.into(); + let fungible_asset: Asset = + FungibleAsset::new(faucet.id(), params.amount.as_canonical_u64())?.into(); let output_note = executed_transaction.output_notes().get_note(0).clone(); let assets = NoteAssets::new(vec![fungible_asset])?; @@ -125,6 +129,29 @@ pub fn verify_minted_output_note( Ok(()) } +async fn execute_faucet_note_script( + mock_chain: &MockChain, + faucet_id: AccountId, + sender_account_id: AccountId, + note_script_code: &str, + rng_seed: u32, +) -> anyhow::Result> { + let source_manager = Arc::new(DefaultSourceManager::default()); + + let mut rng = RandomCoin::new([Felt::from(rng_seed); 4].into()); + let note = NoteBuilder::new(sender_account_id, &mut rng) + .note_type(NoteType::Private) + .code(note_script_code) + .build()?; + + let tx_context = mock_chain + .build_tx_context(faucet_id, &[], &[note])? 
+ .with_source_manager(source_manager) + .build()?; + + Ok(tx_context.execute().await) +} + // TESTS MINT FUNGIBLE ASSET // ================================================================================================ @@ -133,7 +160,9 @@ pub fn verify_minted_output_note( async fn minting_fungible_asset_on_existing_faucet_succeeds() -> anyhow::Result<()> { let mut builder = MockChain::builder(); let faucet = builder.add_existing_basic_faucet( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, "TST", 200, None, @@ -154,14 +183,16 @@ async fn minting_fungible_asset_on_existing_faucet_succeeds() -> anyhow::Result< Ok(()) } -/// Tests that distribute fails when the minted amount would exceed the max supply. +/// Tests that mint fails when the minted amount would exceed the max supply. #[tokio::test] async fn faucet_contract_mint_fungible_asset_fails_exceeds_max_supply() -> anyhow::Result<()> { // CONSTRUCT AND EXECUTE TX (Failure) // -------------------------------------------------------------------------------------------- let mut builder = MockChain::builder(); let faucet = builder.add_existing_basic_faucet( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, "TST", 200, None, @@ -184,7 +215,7 @@ async fn faucet_contract_mint_fungible_asset_fails_exceeds_max_supply() -> anyho push.{amount} # => [amount, tag, note_type, RECIPIENT, pad(9)] - call.::miden::standards::faucets::basic_fungible::distribute + call.::miden::standards::faucets::basic_fungible::mint_and_send # => [note_idx, pad(15)] # truncate the stack @@ -216,7 +247,9 @@ async fn faucet_contract_mint_fungible_asset_fails_exceeds_max_supply() -> anyho async fn minting_fungible_asset_on_new_faucet_succeeds() -> anyhow::Result<()> { let mut builder = MockChain::builder(); let faucet = builder.create_new_faucet( - Auth::BasicAuth { auth_scheme: 
AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, "TST", 200, )?; @@ -247,7 +280,9 @@ async fn prove_burning_fungible_asset_on_existing_faucet_succeeds() -> anyhow::R let mut builder = MockChain::builder(); let faucet = builder.add_existing_basic_faucet( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, "TST", max_supply.into(), Some(token_supply.into()), @@ -263,16 +298,13 @@ async fn prove_burning_fungible_asset_on_existing_faucet_succeeds() -> anyhow::R # => [] call.::miden::standards::faucets::basic_fungible::burn - # => [ASSET] - - # truncate the stack - dropw + # => [pad(16)] end "; let note = get_note_with_fungible_asset_and_script(fungible_asset, burn_note_script_code); - builder.add_output_note(OutputNote::Full(note.clone())); + builder.add_output_note(RawOutputNote::Full(note.clone())); let mock_chain = builder.build()?; let token_metadata = TokenMetadata::try_from(faucet.storage())?; @@ -295,7 +327,7 @@ async fn prove_burning_fungible_asset_on_existing_faucet_succeeds() -> anyhow::R .await?; // Prove, serialize/deserialize and verify the transaction - prove_and_verify_transaction(executed_transaction.clone())?; + prove_and_verify_transaction(executed_transaction.clone()).await?; assert_eq!(executed_transaction.account_delta().nonce_delta(), Felt::new(1)); assert_eq!(executed_transaction.input_notes().get_note(0).id(), note.id()); @@ -310,7 +342,9 @@ async fn faucet_burn_fungible_asset_fails_amount_exceeds_token_supply() -> anyho let mut builder = MockChain::builder(); let faucet = builder.add_existing_basic_faucet( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, "TST", max_supply.into(), Some(token_supply.into()), @@ -327,16 +361,13 @@ async fn faucet_burn_fungible_asset_fails_amount_exceeds_token_supply() -> anyho # => [] 
call.::miden::standards::faucets::basic_fungible::burn - # => [ASSET] - - # truncate the stack - dropw + # => [pad(16)] end "; let note = get_note_with_fungible_asset_and_script(fungible_asset, burn_note_script_code); - builder.add_output_note(OutputNote::Full(note.clone())); + builder.add_output_note(RawOutputNote::Full(note.clone())); let mock_chain = builder.build()?; let tx = mock_chain @@ -355,13 +386,15 @@ async fn faucet_burn_fungible_asset_fails_amount_exceeds_token_supply() -> anyho /// Tests that a public note can be created during note consumption by fetching the note script /// from the data store. This test verifies the functionality added in issue #1972. /// -/// The test creates a note that calls the faucet's `distribute` function to create a PUBLIC +/// The test creates a note that calls the faucet's `mint` function to create a PUBLIC /// P2ID output note. The P2ID script is fetched from the data store during transaction execution. #[tokio::test] async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Result<()> { let mut builder = MockChain::builder(); let faucet = builder.add_existing_basic_faucet( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, "TST", 200, None, @@ -400,7 +433,7 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul let output_script_root = note_recipient.script().root(); - let asset = FungibleAsset::new(faucet.id(), amount.into())?; + let asset = FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?; let metadata = NoteMetadata::new(faucet.id(), note_type).with_tag(tag); let expected_note = Note::new(NoteAssets::new(vec![asset.into()])?, metadata, note_recipient); @@ -431,13 +464,13 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul exec.note::build_recipient # => [RECIPIENT] - # Now call distribute with the computed recipient + # Now call mint with the 
computed recipient push.{note_type} push.{tag} push.{amount} # => [amount, tag, note_type, RECIPIENT] - call.::miden::standards::faucets::basic_fungible::distribute + call.::miden::standards::faucets::basic_fungible::mint_and_send # => [note_idx, pad(15)] # Truncate the stack @@ -458,8 +491,8 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul amount = amount, ); - // Create the trigger note that will call distribute - let mut rng = RpoRandomCoin::new([Felt::from(1u32); 4].into()); + // Create the trigger note that will call mint + let mut rng = RandomCoin::new([Felt::from(1u32); 4].into()); let trigger_note = NoteBuilder::new(faucet.id(), &mut rng) .note_type(NoteType::Private) .tag(NoteTag::default().into()) @@ -467,7 +500,7 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul .code(trigger_note_script_code) .build()?; - builder.add_output_note(OutputNote::Full(trigger_note.clone())); + builder.add_output_note(RawOutputNote::Full(trigger_note.clone())); let mock_chain = builder.build()?; // Execute the transaction - this should fetch the output note script from the data store. 
@@ -487,7 +520,7 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul // Extract the full note from the OutputNote enum let full_note = match output_note { - OutputNote::Full(note) => note, + RawOutputNote::Full(note) => note, _ => panic!("Expected OutputNote::Full variant"), }; @@ -495,7 +528,7 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul assert_eq!(full_note.metadata().note_type(), NoteType::Public); // Verify the output note contains the minted fungible asset - let expected_asset = FungibleAsset::new(faucet.id(), amount.into())?; + let expected_asset = FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?; let expected_asset_obj = Asset::from(expected_asset); assert!(full_note.assets().iter().any(|asset| asset == &expected_asset_obj)); @@ -546,6 +579,7 @@ async fn network_faucet_mint() -> anyhow::Result<()> { max_supply, faucet_owner_account_id, Some(token_supply), + OwnerControlledInitConfig::OwnerOnly, )?; // Create a target account to consume the minted note @@ -553,25 +587,30 @@ async fn network_faucet_mint() -> anyhow::Result<()> { // Check the Network Fungible Faucet's max supply. let actual_max_supply = TokenMetadata::try_from(faucet.storage())?.max_supply(); - assert_eq!(actual_max_supply.as_int(), max_supply); + assert_eq!(actual_max_supply.as_canonical_u64(), max_supply); - // Check that the creator account ID is stored in slot 2 (second storage slot of the component) - // The owner_account_id is stored as Word [0, 0, suffix, prefix] - let stored_owner_id = - faucet.storage().get_item(NetworkFungibleFaucet::owner_config_slot()).unwrap(); - assert_eq!(stored_owner_id[3], faucet_owner_account_id.prefix().as_felt()); - assert_eq!(stored_owner_id[2], Felt::new(faucet_owner_account_id.suffix().as_int())); + // Check that the creator account ID is stored in the ownership slot. 
+ // Word: [owner_suffix, owner_prefix, nominated_suffix, nominated_prefix] + let stored_owner_id = faucet.storage().get_item(Ownable2Step::slot_name()).unwrap(); + assert_eq!( + stored_owner_id[0], + Felt::new(faucet_owner_account_id.suffix().as_canonical_u64()) + ); + assert_eq!(stored_owner_id[1], faucet_owner_account_id.prefix().as_felt()); + assert_eq!(stored_owner_id[2], Felt::new(0)); // no nominated owner + assert_eq!(stored_owner_id[3], Felt::new(0)); // Check that the faucet's token supply has been correctly initialized. // The already issued amount should be 50. let initial_token_supply = TokenMetadata::try_from(faucet.storage())?.token_supply(); - assert_eq!(initial_token_supply.as_int(), token_supply); + assert_eq!(initial_token_supply.as_canonical_u64(), token_supply); // CREATE MINT NOTE USING STANDARD NOTE // -------------------------------------------------------------------------------------------- let amount = Felt::new(75); - let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.into()).unwrap().into(); + let mint_asset: Asset = + FungibleAsset::new(faucet.id(), amount.as_canonical_u64()).unwrap().into(); let serial_num = Word::default(); let output_note_tag = NoteTag::with_account_target(target_account.id()); @@ -588,7 +627,7 @@ async fn network_faucet_mint() -> anyhow::Result<()> { // Create the MINT note using the helper function let mint_storage = MintNoteStorage::new_private(recipient, amount, output_note_tag.into()); - let mut rng = RpoRandomCoin::new([Felt::from(42u32); 4].into()); + let mut rng = RandomCoin::new([Felt::from(42u32); 4].into()); let mint_note = MintNote::create( faucet.id(), faucet_owner_account_id, @@ -598,7 +637,7 @@ async fn network_faucet_mint() -> anyhow::Result<()> { )?; // Add the MINT note to the mock chain - builder.add_output_note(OutputNote::Full(mint_note.clone())); + builder.add_output_note(RawOutputNote::Full(mint_note.clone())); let mut mock_chain = builder.build()?; // EXECUTE MINT NOTE AGAINST 
NETWORK FAUCET @@ -611,7 +650,7 @@ async fn network_faucet_mint() -> anyhow::Result<()> { let output_note = executed_transaction.output_notes().get_note(0); // Verify the output note contains the minted fungible asset - let expected_asset = FungibleAsset::new(faucet.id(), amount.into())?; + let expected_asset = FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?; let assets = NoteAssets::new(vec![expected_asset.into()])?; let expected_note_id = NoteId::new(recipient, assets.commitment()); @@ -635,7 +674,7 @@ async fn network_faucet_mint() -> anyhow::Result<()> { // Verify the account's vault now contains the expected fungible asset let balance = target_account.vault().get_balance(faucet.id())?; - assert_eq!(balance, expected_asset.amount(),); + assert_eq!(balance, expected_asset.amount().inner(),); Ok(()) } @@ -655,12 +694,18 @@ async fn test_network_faucet_owner_can_mint() -> anyhow::Result<()> { AccountStorageMode::Private, ); - let faucet = builder.add_existing_network_faucet("NET", 1000, owner_account_id, Some(50))?; + let faucet = builder.add_existing_network_faucet( + "NET", + 1000, + owner_account_id, + Some(50), + OwnerControlledInitConfig::OwnerOnly, + )?; let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; let mock_chain = builder.build()?; let amount = Felt::new(75); - let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.into())?.into(); + let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?.into(); let output_note_tag = NoteTag::with_account_target(target_account.id()); let p2id_note = create_p2id_note_exact( @@ -674,7 +719,7 @@ async fn test_network_faucet_owner_can_mint() -> anyhow::Result<()> { let mint_inputs = MintNoteStorage::new_private(recipient, amount, output_note_tag.into()); - let mut rng = RpoRandomCoin::new([Felt::from(42u32); 4].into()); + let mut rng = RandomCoin::new([Felt::from(42u32); 4].into()); let mint_note = MintNote::create( faucet.id(), owner_account_id, @@ -691,6 
+736,56 @@ async fn test_network_faucet_owner_can_mint() -> anyhow::Result<()> { Ok(()) } +/// Tests that set_mint_policy rejects policy roots outside the allowed policy roots map. +#[tokio::test] +async fn test_network_faucet_set_policy_rejects_non_allowed_root() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + let owner_account_id = AccountId::dummy( + [1; 15], + AccountIdVersion::Version0, + AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + + let faucet = builder.add_existing_network_faucet( + "NET", + 1000, + owner_account_id, + Some(0), + OwnerControlledInitConfig::OwnerOnly, + )?; + let mock_chain = builder.build()?; + + // This root exists in account code, but is not in the mint policy allowlist. + let invalid_policy_root = NetworkFungibleFaucet::mint_and_send_digest(); + let set_policy_note_script = format!( + r#" + use miden::standards::mint_policies::policy_manager->policy_manager + + begin + repeat.12 push.0 end + push.{invalid_policy_root} + call.policy_manager::set_mint_policy + dropw dropw dropw dropw + end + "# + ); + + let result = execute_faucet_note_script( + &mock_chain, + faucet.id(), + owner_account_id, + &set_policy_note_script, + 400, + ) + .await?; + + assert_transaction_executor_error!(result, ERR_MINT_POLICY_ROOT_NOT_ALLOWED); + + Ok(()) +} + /// Tests that a non-owner cannot mint assets on network faucet. 
#[tokio::test] async fn test_network_faucet_non_owner_cannot_mint() -> anyhow::Result<()> { @@ -710,12 +805,18 @@ async fn test_network_faucet_non_owner_cannot_mint() -> anyhow::Result<()> { AccountStorageMode::Private, ); - let faucet = builder.add_existing_network_faucet("NET", 1000, owner_account_id, Some(50))?; + let faucet = builder.add_existing_network_faucet( + "NET", + 1000, + owner_account_id, + Some(50), + OwnerControlledInitConfig::OwnerOnly, + )?; let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; let mock_chain = builder.build()?; let amount = Felt::new(75); - let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.into())?.into(); + let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?.into(); let output_note_tag = NoteTag::with_account_target(target_account.id()); let p2id_note = create_p2id_note_exact( @@ -730,7 +831,7 @@ async fn test_network_faucet_non_owner_cannot_mint() -> anyhow::Result<()> { let mint_inputs = MintNoteStorage::new_private(recipient, amount, output_note_tag.into()); // Create mint note from NON-OWNER - let mut rng = RpoRandomCoin::new([Felt::from(42u32); 4].into()); + let mut rng = RandomCoin::new([Felt::from(42u32); 4].into()); let mint_note = MintNote::create( faucet.id(), non_owner_account_id, @@ -742,7 +843,7 @@ async fn test_network_faucet_non_owner_cannot_mint() -> anyhow::Result<()> { let tx_context = mock_chain.build_tx_context(faucet.id(), &[], &[mint_note])?.build()?; let result = tx_context.execute().await; - // The distribute function uses ERR_ONLY_OWNER, which is "note sender is not the owner" + // The mint function uses ERR_ONLY_OWNER, which is "note sender is not the owner" let expected_error = ERR_SENDER_NOT_OWNER; assert_transaction_executor_error!(result, expected_error); @@ -761,22 +862,30 @@ async fn test_network_faucet_owner_storage() -> anyhow::Result<()> { AccountStorageMode::Private, ); - let faucet = builder.add_existing_network_faucet("NET", 1000, 
owner_account_id, Some(50))?; + let faucet = builder.add_existing_network_faucet( + "NET", + 1000, + owner_account_id, + Some(50), + OwnerControlledInitConfig::OwnerOnly, + )?; let _mock_chain = builder.build()?; // Verify owner is stored correctly - let stored_owner = faucet.storage().get_item(NetworkFungibleFaucet::owner_config_slot())?; + let stored_owner = faucet.storage().get_item(Ownable2Step::slot_name())?; - // Storage format: [0, 0, suffix, prefix] - assert_eq!(stored_owner[3], owner_account_id.prefix().as_felt()); - assert_eq!(stored_owner[2], Felt::new(owner_account_id.suffix().as_int())); - assert_eq!(stored_owner[1], Felt::new(0)); - assert_eq!(stored_owner[0], Felt::new(0)); + // Word: [owner_suffix, owner_prefix, nominated_suffix, nominated_prefix] + assert_eq!(stored_owner[0], Felt::new(owner_account_id.suffix().as_canonical_u64())); + assert_eq!(stored_owner[1], owner_account_id.prefix().as_felt()); + assert_eq!(stored_owner[2], Felt::new(0)); // no nominated owner + assert_eq!(stored_owner[3], Felt::new(0)); Ok(()) } -/// Tests that transfer_ownership updates the owner correctly. +/// Tests that two-step transfer_ownership updates the owner correctly. +/// Step 1: Owner nominates a new owner via transfer_ownership. +/// Step 2: Nominated owner accepts via accept_ownership. 
#[tokio::test] async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { let mut builder = MockChain::builder(); @@ -796,12 +905,17 @@ async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { AccountStorageMode::Private, ); - let faucet = - builder.add_existing_network_faucet("NET", 1000, initial_owner_account_id, Some(50))?; + let faucet = builder.add_existing_network_faucet( + "NET", + 1000, + initial_owner_account_id, + Some(50), + OwnerControlledInitConfig::OwnerOnly, + )?; let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; let amount = Felt::new(75); - let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.into())?.into(); + let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?.into(); let output_note_tag = NoteTag::with_account_target(target_account.id()); let p2id_note = create_p2id_note_exact( @@ -816,7 +930,7 @@ async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { // Sanity Check: Prove that the initial owner can mint assets let mint_inputs = MintNoteStorage::new_private(recipient, amount, output_note_tag.into()); - let mut rng = RpoRandomCoin::new([Felt::from(42u32); 4].into()); + let mut rng = RandomCoin::new([Felt::from(42u32); 4].into()); let mint_note = MintNote::create( faucet.id(), initial_owner_account_id, @@ -825,29 +939,27 @@ async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { &mut rng, )?; - // Action: Create transfer_ownership note script + // Step 1: Create transfer_ownership note script to nominate new owner let transfer_note_script_code = format!( r#" - use miden::standards::faucets::network_fungible->network_faucet + use miden::standards::access::ownable2step begin repeat.14 push.0 end - push.{new_owner_suffix} push.{new_owner_prefix} - call.network_faucet::transfer_ownership + push.{new_owner_suffix} + call.ownable2step::transfer_ownership dropw dropw dropw dropw end "#, new_owner_prefix = 
new_owner_account_id.prefix().as_felt(), - new_owner_suffix = Felt::new(new_owner_account_id.suffix().as_int()), + new_owner_suffix = Felt::new(new_owner_account_id.suffix().as_canonical_u64()), ); let source_manager = Arc::new(DefaultSourceManager::default()); - let transfer_note_script = CodeBuilder::with_source_manager(source_manager.clone()) - .compile_note_script(transfer_note_script_code.clone())?; // Create the transfer note and add it to the builder so it exists on-chain - let mut rng = RpoRandomCoin::new([Felt::from(200u32); 4].into()); + let mut rng = RandomCoin::new([Felt::from(200u32); 4].into()); let transfer_note = NoteBuilder::new(initial_owner_account_id, &mut rng) .note_type(NoteType::Private) .tag(NoteTag::default().into()) @@ -856,7 +968,7 @@ async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { .build()?; // Add the transfer note to the builder before building the chain - builder.add_output_note(OutputNote::Full(transfer_note.clone())); + builder.add_output_note(RawOutputNote::Full(transfer_note.clone())); let mut mock_chain = builder.build()?; // Prove the block to make the transfer note exist on-chain @@ -867,10 +979,9 @@ async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { let executed_transaction = tx_context.execute().await?; assert_eq!(executed_transaction.output_notes().num_notes(), 1); - // Action: Execute transfer_ownership via note script + // Execute transfer_ownership via note script (nominates new owner) let tx_context = mock_chain .build_tx_context(faucet.id(), &[transfer_note.id()], &[])? 
- .add_note_script(transfer_note_script.clone()) .with_source_manager(source_manager.clone()) .build()?; let executed_transaction = tx_context.execute().await?; @@ -879,48 +990,44 @@ async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { mock_chain.add_pending_executed_transaction(&executed_transaction)?; mock_chain.prove_next_block()?; - // Apply the delta to the faucet account to reflect the ownership change let mut updated_faucet = faucet.clone(); updated_faucet.apply_delta(executed_transaction.account_delta())?; - // Validation 1: Try to mint using the old owner - should fail - let mut rng = RpoRandomCoin::new([Felt::from(300u32); 4].into()); - let mint_note_old_owner = MintNote::create( - updated_faucet.id(), - initial_owner_account_id, - mint_inputs.clone(), - NoteAttachment::default(), - &mut rng, - )?; - - // Use the note as an unauthenticated note (full note object) - it will be created in this - // transaction - let tx_context = mock_chain - .build_tx_context(updated_faucet.id(), &[], &[mint_note_old_owner])? 
- .build()?; - let result = tx_context.execute().await; + // Step 2: Accept ownership as the nominated owner + let accept_note_script_code = r#" + use miden::standards::access::ownable2step - // The distribute function uses ERR_ONLY_OWNER, which is "note sender is not the owner" - let expected_error = ERR_SENDER_NOT_OWNER; - assert_transaction_executor_error!(result, expected_error); + begin + repeat.16 push.0 end + call.ownable2step::accept_ownership + dropw dropw dropw dropw + end + "#; - // Validation 2: Try to mint using the new owner - should succeed - let mut rng = RpoRandomCoin::new([Felt::from(400u32); 4].into()); - let mint_note_new_owner = MintNote::create( - updated_faucet.id(), - new_owner_account_id, - mint_inputs, - NoteAttachment::default(), - &mut rng, - )?; + let mut rng = RandomCoin::new([Felt::from(400u32); 4].into()); + let accept_note = NoteBuilder::new(new_owner_account_id, &mut rng) + .note_type(NoteType::Private) + .tag(NoteTag::default().into()) + .serial_number(Word::from([55, 66, 77, 88u32])) + .code(accept_note_script_code) + .build()?; let tx_context = mock_chain - .build_tx_context(updated_faucet.id(), &[], &[mint_note_new_owner])? + .build_tx_context(updated_faucet.clone(), &[], slice::from_ref(&accept_note))? 
+ .with_source_manager(source_manager.clone()) .build()?; let executed_transaction = tx_context.execute().await?; - // Verify that minting succeeded - assert_eq!(executed_transaction.output_notes().num_notes(), 1); + let mut final_faucet = updated_faucet.clone(); + final_faucet.apply_delta(executed_transaction.account_delta())?; + + // Verify that owner changed to new_owner and nominated was cleared + // Word: [owner_suffix, owner_prefix, nominated_suffix, nominated_prefix] + let stored_owner = final_faucet.storage().get_item(Ownable2Step::slot_name())?; + assert_eq!(stored_owner[0], Felt::new(new_owner_account_id.suffix().as_canonical_u64())); + assert_eq!(stored_owner[1], new_owner_account_id.prefix().as_felt()); + assert_eq!(stored_owner[2], Felt::new(0)); // nominated cleared + assert_eq!(stored_owner[3], Felt::new(0)); Ok(()) } @@ -951,32 +1058,36 @@ async fn test_network_faucet_only_owner_can_transfer() -> anyhow::Result<()> { AccountStorageMode::Private, ); - let faucet = builder.add_existing_network_faucet("NET", 1000, owner_account_id, Some(50))?; + let faucet = builder.add_existing_network_faucet( + "NET", + 1000, + owner_account_id, + Some(50), + OwnerControlledInitConfig::OwnerOnly, + )?; let mock_chain = builder.build()?; // Create transfer ownership note script let transfer_note_script_code = format!( r#" - use miden::standards::faucets::network_fungible->network_faucet + use miden::standards::access::ownable2step begin repeat.14 push.0 end - push.{new_owner_suffix} push.{new_owner_prefix} - call.network_faucet::transfer_ownership + push.{new_owner_suffix} + call.ownable2step::transfer_ownership dropw dropw dropw dropw end "#, new_owner_prefix = new_owner_account_id.prefix().as_felt(), - new_owner_suffix = Felt::new(new_owner_account_id.suffix().as_int()), + new_owner_suffix = Felt::new(new_owner_account_id.suffix().as_canonical_u64()), ); let source_manager = Arc::new(DefaultSourceManager::default()); - let transfer_note_script = 
CodeBuilder::with_source_manager(source_manager.clone()) - .compile_note_script(transfer_note_script_code.clone())?; // Create a note from NON-OWNER that tries to transfer ownership - let mut rng = RpoRandomCoin::new([Felt::from(100u32); 4].into()); + let mut rng = RandomCoin::new([Felt::from(100u32); 4].into()); let transfer_note = NoteBuilder::new(non_owner_account_id, &mut rng) .note_type(NoteType::Private) .tag(NoteTag::default().into()) @@ -986,14 +1097,11 @@ async fn test_network_faucet_only_owner_can_transfer() -> anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(faucet.id(), &[], &[transfer_note])? - .add_note_script(transfer_note_script.clone()) .with_source_manager(source_manager.clone()) .build()?; let result = tx_context.execute().await; - // Verify that the transaction failed with ERR_ONLY_OWNER - let expected_error = ERR_SENDER_NOT_OWNER; - assert_transaction_executor_error!(result, expected_error); + assert_transaction_executor_error!(result, ERR_SENDER_NOT_OWNER); Ok(()) } @@ -1017,50 +1125,50 @@ async fn test_network_faucet_renounce_ownership() -> anyhow::Result<()> { AccountStorageMode::Private, ); - let faucet = builder.add_existing_network_faucet("NET", 1000, owner_account_id, Some(50))?; + let faucet = builder.add_existing_network_faucet( + "NET", + 1000, + owner_account_id, + Some(50), + OwnerControlledInitConfig::OwnerOnly, + )?; // Check stored value before renouncing - let stored_owner_before = - faucet.storage().get_item(NetworkFungibleFaucet::owner_config_slot())?; - assert_eq!(stored_owner_before[3], owner_account_id.prefix().as_felt()); - assert_eq!(stored_owner_before[2], Felt::new(owner_account_id.suffix().as_int())); + let stored_owner_before = faucet.storage().get_item(Ownable2Step::slot_name())?; + assert_eq!(stored_owner_before[0], Felt::new(owner_account_id.suffix().as_canonical_u64())); + assert_eq!(stored_owner_before[1], owner_account_id.prefix().as_felt()); // Create renounce_ownership note script let 
renounce_note_script_code = r#" - use miden::standards::faucets::network_fungible->network_faucet + use miden::standards::access::ownable2step begin repeat.16 push.0 end - call.network_faucet::renounce_ownership + call.ownable2step::renounce_ownership dropw dropw dropw dropw end "#; let source_manager = Arc::new(DefaultSourceManager::default()); - let renounce_note_script = CodeBuilder::with_source_manager(source_manager.clone()) - .compile_note_script(renounce_note_script_code)?; // Create transfer note script (will be used after renounce) let transfer_note_script_code = format!( r#" - use miden::standards::faucets::network_fungible->network_faucet + use miden::standards::access::ownable2step begin repeat.14 push.0 end - push.{new_owner_suffix} push.{new_owner_prefix} - call.network_faucet::transfer_ownership + push.{new_owner_suffix} + call.ownable2step::transfer_ownership dropw dropw dropw dropw end "#, new_owner_prefix = new_owner_account_id.prefix().as_felt(), - new_owner_suffix = Felt::new(new_owner_account_id.suffix().as_int()), + new_owner_suffix = Felt::new(new_owner_account_id.suffix().as_canonical_u64()), ); - let transfer_note_script = CodeBuilder::with_source_manager(source_manager.clone()) - .compile_note_script(transfer_note_script_code.clone())?; - - let mut rng = RpoRandomCoin::new([Felt::from(200u32); 4].into()); + let mut rng = RandomCoin::new([Felt::from(200u32); 4].into()); let renounce_note = NoteBuilder::new(owner_account_id, &mut rng) .note_type(NoteType::Private) .tag(NoteTag::default().into()) @@ -1068,7 +1176,7 @@ async fn test_network_faucet_renounce_ownership() -> anyhow::Result<()> { .code(renounce_note_script_code) .build()?; - let mut rng = RpoRandomCoin::new([Felt::from(300u32); 4].into()); + let mut rng = RandomCoin::new([Felt::from(300u32); 4].into()); let transfer_note = NoteBuilder::new(owner_account_id, &mut rng) .note_type(NoteType::Private) .tag(NoteTag::default().into()) @@ -1076,15 +1184,14 @@ async fn 
test_network_faucet_renounce_ownership() -> anyhow::Result<()> { .code(transfer_note_script_code.clone()) .build()?; - builder.add_output_note(OutputNote::Full(renounce_note.clone())); - builder.add_output_note(OutputNote::Full(transfer_note.clone())); + builder.add_output_note(RawOutputNote::Full(renounce_note.clone())); + builder.add_output_note(RawOutputNote::Full(transfer_note.clone())); let mut mock_chain = builder.build()?; mock_chain.prove_next_block()?; // Execute renounce_ownership let tx_context = mock_chain .build_tx_context(faucet.id(), &[renounce_note.id()], &[])? - .add_note_script(renounce_note_script.clone()) .with_source_manager(source_manager.clone()) .build()?; let executed_transaction = tx_context.execute().await?; @@ -1096,27 +1203,22 @@ async fn test_network_faucet_renounce_ownership() -> anyhow::Result<()> { updated_faucet.apply_delta(executed_transaction.account_delta())?; // Check stored value after renouncing - should be zero - let stored_owner_after = - updated_faucet.storage().get_item(NetworkFungibleFaucet::owner_config_slot())?; + let stored_owner_after = updated_faucet.storage().get_item(Ownable2Step::slot_name())?; assert_eq!(stored_owner_after[0], Felt::new(0)); assert_eq!(stored_owner_after[1], Felt::new(0)); assert_eq!(stored_owner_after[2], Felt::new(0)); assert_eq!(stored_owner_after[3], Felt::new(0)); // Try to transfer ownership - should fail because there's no owner - // The transfer note was already added to the builder, so we need to prove another block - // to make it available on-chain after the renounce transaction mock_chain.prove_next_block()?; let tx_context = mock_chain .build_tx_context(updated_faucet.id(), &[transfer_note.id()], &[])? 
- .add_note_script(transfer_note_script.clone()) .with_source_manager(source_manager.clone()) .build()?; let result = tx_context.execute().await; - let expected_error = ERR_SENDER_NOT_OWNER; - assert_transaction_executor_error!(result, expected_error); + assert_transaction_executor_error!(result, ERR_SENDER_NOT_OWNER); Ok(()) } @@ -1149,15 +1251,20 @@ async fn network_faucet_burn() -> anyhow::Result<()> { AccountStorageMode::Private, ); - let mut faucet = - builder.add_existing_network_faucet("NET", 200, faucet_owner_account_id, Some(100))?; + let mut faucet = builder.add_existing_network_faucet( + "NET", + 200, + faucet_owner_account_id, + Some(100), + OwnerControlledInitConfig::OwnerOnly, + )?; let burn_amount = 100u64; let fungible_asset = FungibleAsset::new(faucet.id(), burn_amount).unwrap(); // CREATE BURN NOTE // -------------------------------------------------------------------------------------------- - let mut rng = RpoRandomCoin::new([Felt::from(99u32); 4].into()); + let mut rng = RandomCoin::new([Felt::from(99u32); 4].into()); let note = BurnNote::create( faucet_owner_account_id, faucet.id(), @@ -1166,7 +1273,7 @@ async fn network_faucet_burn() -> anyhow::Result<()> { &mut rng, )?; - builder.add_output_note(OutputNote::Full(note.clone())); + builder.add_output_note(RawOutputNote::Full(note.clone())); let mut mock_chain = builder.build()?; mock_chain.prove_next_block()?; @@ -1189,7 +1296,10 @@ async fn network_faucet_burn() -> anyhow::Result<()> { // Apply the delta to the faucet account and verify the token issuance decreased faucet.apply_delta(executed_transaction.account_delta())?; let final_token_supply = TokenMetadata::try_from(faucet.storage())?.token_supply(); - assert_eq!(final_token_supply, Felt::new(initial_token_supply.as_int() - burn_amount)); + assert_eq!( + final_token_supply, + Felt::new(initial_token_supply.as_canonical_u64() - burn_amount) + ); Ok(()) } @@ -1213,12 +1323,18 @@ async fn test_mint_note_output_note_types(#[case] note_type: 
NoteType) -> anyhow AccountStorageMode::Private, ); - let faucet = - builder.add_existing_network_faucet("NET", 1000, faucet_owner_account_id, Some(50))?; + let faucet = builder.add_existing_network_faucet( + "NET", + 1000, + faucet_owner_account_id, + Some(50), + OwnerControlledInitConfig::OwnerOnly, + )?; let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; let amount = Felt::new(75); - let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.into()).unwrap().into(); + let mint_asset: Asset = + FungibleAsset::new(faucet.id(), amount.as_canonical_u64()).unwrap().into(); let serial_num = Word::from([1, 2, 3, 4u32]); // Create the expected P2ID output note @@ -1249,7 +1365,7 @@ async fn test_mint_note_output_note_types(#[case] note_type: NoteType) -> anyhow }, }; - let mut rng = RpoRandomCoin::new([Felt::from(42u32); 4].into()); + let mut rng = RandomCoin::new([Felt::from(42u32); 4].into()); let mint_note = MintNote::create( faucet.id(), faucet_owner_account_id, @@ -1258,7 +1374,7 @@ async fn test_mint_note_output_note_types(#[case] note_type: NoteType) -> anyhow &mut rng, )?; - builder.add_output_note(OutputNote::Full(mint_note.clone())); + builder.add_output_note(RawOutputNote::Full(mint_note.clone())); let mut mock_chain = builder.build()?; let tx_context = mock_chain.build_tx_context(faucet.id(), &[mint_note.id()], &[])?.build()?; @@ -1277,8 +1393,8 @@ async fn test_mint_note_output_note_types(#[case] note_type: NoteType) -> anyhow NoteType::Public => { // For public notes, we get OutputNote::Full and can compare key properties let created_note = match output_note { - OutputNote::Full(note) => note, - _ => panic!("Expected OutputNote::Full variant for public note"), + RawOutputNote::Full(note) => note, + _ => panic!("Expected OutputNote::Full variant"), }; assert_eq!(created_note, &p2id_mint_output_note); @@ -1297,9 +1413,100 @@ async fn test_mint_note_output_note_types(#[case] note_type: NoteType) -> anyhow 
target_account_mut.apply_delta(consume_executed_transaction.account_delta())?; - let expected_asset = FungibleAsset::new(faucet.id(), amount.into())?; + let expected_asset = FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?; let balance = target_account_mut.vault().get_balance(faucet.id())?; - assert_eq!(balance, expected_asset.amount()); + assert_eq!(balance, expected_asset.amount().inner()); + + Ok(()) +} + +/// Tests that calling mint multiple times in a single transaction produces output notes +/// with the correct individual amounts, not the cumulative vault totals. +#[tokio::test] +async fn multiple_mints_in_single_tx_produce_correct_amounts() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + let faucet = builder.add_existing_basic_faucet( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + "TST", + 300, + None, + )?; + let mock_chain = builder.build()?; + + let recipient_1 = Word::from([0, 1, 2, 3u32]); + let recipient_2 = Word::from([4, 5, 6, 7u32]); + let tag = NoteTag::default(); + let note_type = NoteType::Private; + let amount_1: u64 = 100; + let amount_2: u64 = 50; + + let tx_script_code = format!( + " + begin + # --- First mint: mint {amount_1} tokens to recipient_1 --- + padw padw push.0 + + push.{recipient_1} + push.{note_type} + push.{tag} + push.{amount_1} + # => [amount_1, tag, note_type, RECIPIENT_1, pad(9)] + + call.::miden::standards::faucets::basic_fungible::mint_and_send + # => [note_idx, pad(15)] + + # clean up the stack before the second call + dropw dropw dropw dropw + + # --- Second mint: mint {amount_2} tokens to recipient_2 --- + padw padw push.0 + + push.{recipient_2} + push.{note_type} + push.{tag} + push.{amount_2} + # => [amount_2, tag, note_type, RECIPIENT_2, pad(9)] + + call.::miden::standards::faucets::basic_fungible::mint_and_send + # => [note_idx, pad(15)] + + # truncate the stack + dropw dropw dropw dropw + end + ", + note_type = note_type as u8, + tag = u32::from(tag), + ); + 
+ let source_manager = Arc::new(DefaultSourceManager::default()); + let tx_script = CodeBuilder::with_source_manager(source_manager.clone()) + .compile_tx_script(tx_script_code)?; + let tx_context = mock_chain + .build_tx_context(faucet.clone(), &[], &[])? + .tx_script(tx_script) + .with_source_manager(source_manager) + .build()?; + + let executed_transaction = tx_context.execute().await?; + + // Verify two output notes were created + assert_eq!(executed_transaction.output_notes().num_notes(), 2); + + // Verify first note has exactly amount_1 tokens. + let expected_asset_1: Asset = FungibleAsset::new(faucet.id(), amount_1)?.into(); + let output_note_1 = executed_transaction.output_notes().get_note(0); + let assets_1 = NoteAssets::new(vec![expected_asset_1])?; + let expected_id_1 = NoteId::new(recipient_1, assets_1.commitment()); + assert_eq!(output_note_1.id(), expected_id_1); + + // Verify second note has exactly amount_2 tokens. + let expected_asset_2: Asset = FungibleAsset::new(faucet.id(), amount_2)?.into(); + let output_note_2 = executed_transaction.output_notes().get_note(1); + let assets_2 = NoteAssets::new(vec![expected_asset_2])?; + let expected_id_2 = NoteId::new(recipient_2, assets_2.commitment()); + assert_eq!(output_note_2.id(), expected_id_2); Ok(()) } diff --git a/crates/miden-testing/tests/scripts/fee.rs b/crates/miden-testing/tests/scripts/fee.rs index 3fd41d7b78..8f9e6323d9 100644 --- a/crates/miden-testing/tests/scripts/fee.rs +++ b/crates/miden-testing/tests/scripts/fee.rs @@ -30,7 +30,7 @@ async fn prove_account_creation_with_fees() -> anyhow::Result<()> { .context("failed to execute account-creating transaction")?; let expected_fee = tx.compute_fee(); - assert_eq!(expected_fee, tx.fee().amount()); + assert_eq!(expected_fee, tx.fee().amount().inner()); // We expect that the new account contains the amount minus the paid fee. 
let added_asset = FungibleAsset::new(chain.native_asset_id(), amount)?.sub(tx.fee())?; @@ -45,7 +45,7 @@ async fn prove_account_creation_with_fees() -> anyhow::Result<()> { // account commitment should not be the empty word assert_ne!(tx.account_delta().to_commitment(), Word::empty()); - prove_and_verify_transaction(tx)?; + prove_and_verify_transaction(tx).await?; Ok(()) } diff --git a/crates/miden-testing/tests/scripts/mod.rs b/crates/miden-testing/tests/scripts/mod.rs index 58bf4152ad..8d15402744 100644 --- a/crates/miden-testing/tests/scripts/mod.rs +++ b/crates/miden-testing/tests/scripts/mod.rs @@ -1,5 +1,6 @@ mod faucet; mod fee; +mod ownable2step; mod p2id; mod p2ide; mod send_note; diff --git a/crates/miden-testing/tests/scripts/ownable2step.rs b/crates/miden-testing/tests/scripts/ownable2step.rs new file mode 100644 index 0000000000..c0441acaff --- /dev/null +++ b/crates/miden-testing/tests/scripts/ownable2step.rs @@ -0,0 +1,491 @@ +extern crate alloc; + +use alloc::sync::Arc; + +use miden_processor::crypto::random::RandomCoin; +use miden_protocol::Felt; +use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{ + Account, + AccountBuilder, + AccountComponent, + AccountId, + AccountStorageMode, + AccountType, + StorageSlot, +}; +use miden_protocol::assembly::DefaultSourceManager; +use miden_protocol::assembly::debuginfo::SourceManagerSync; +use miden_protocol::note::Note; +use miden_protocol::testing::account_id::AccountIdBuilder; +use miden_protocol::transaction::RawOutputNote; +use miden_standards::account::access::Ownable2Step; +use miden_standards::code_builder::CodeBuilder; +use miden_standards::errors::standards::{ + ERR_NO_NOMINATED_OWNER, + ERR_SENDER_NOT_NOMINATED_OWNER, + ERR_SENDER_NOT_OWNER, +}; +use miden_standards::testing::note::NoteBuilder; +use miden_testing::{Auth, MockChain, assert_transaction_executor_error}; + +// HELPERS +// 
================================================================================================ + +fn create_ownable_account( + owner: AccountId, + initial_storage: Vec<StorageSlot>, +) -> anyhow::Result<Account> { + let component_code = r#" + use miden::standards::access::ownable2step + pub use ownable2step::get_owner + pub use ownable2step::get_nominated_owner + pub use ownable2step::transfer_ownership + pub use ownable2step::accept_ownership + pub use ownable2step::renounce_ownership + "#; + let component_code_obj = + CodeBuilder::default().compile_component_code("test::ownable", component_code)?; + + let mut storage_slots = initial_storage; + storage_slots.push(Ownable2Step::new(owner).to_storage_slot()); + + let account = AccountBuilder::new([1; 32]) + .storage_mode(AccountStorageMode::Public) + .with_auth_component(Auth::IncrNonce) + .with_component({ + let metadata = AccountComponentMetadata::new("test::ownable", AccountType::all()); + AccountComponent::new(component_code_obj, storage_slots, metadata)?
+ }) + .build_existing()?; + Ok(account) +} + +fn get_owner_from_storage(account: &Account) -> anyhow::Result<Option<AccountId>> { + let ownable = Ownable2Step::try_from_storage(account.storage())?; + Ok(ownable.owner()) +} + +fn get_nominated_owner_from_storage(account: &Account) -> anyhow::Result<Option<AccountId>> { + let ownable = Ownable2Step::try_from_storage(account.storage())?; + Ok(ownable.nominated_owner()) +} + +fn create_transfer_note( + sender: AccountId, + new_owner: AccountId, + rng: &mut RandomCoin, + source_manager: Arc<dyn SourceManagerSync>, +) -> anyhow::Result<Note> { + let script = format!( + r#" + use miden::standards::access::ownable2step->test_account + begin + repeat.14 push.0 end + push.{new_owner_prefix} + push.{new_owner_suffix} + call.test_account::transfer_ownership + dropw dropw dropw dropw + end + "#, + new_owner_prefix = new_owner.prefix().as_felt(), + new_owner_suffix = Felt::new(new_owner.suffix().as_canonical_u64()), + ); + + let note = NoteBuilder::new(sender, rng) + .source_manager(source_manager) + .code(script) + .build()?; + + Ok(note) +} + +fn create_accept_note( + sender: AccountId, + rng: &mut RandomCoin, + source_manager: Arc<dyn SourceManagerSync>, +) -> anyhow::Result<Note> { + let script = r#" + use miden::standards::access::ownable2step->test_account + begin + repeat.16 push.0 end + call.test_account::accept_ownership + dropw dropw dropw dropw + end + "#; + + let note = NoteBuilder::new(sender, rng) + .source_manager(source_manager) + .code(script) + .build()?; + + Ok(note) +} + +fn create_renounce_note( + sender: AccountId, + rng: &mut RandomCoin, + source_manager: Arc<dyn SourceManagerSync>, +) -> anyhow::Result<Note> { + let script = r#" + use miden::standards::access::ownable2step->test_account + begin + repeat.16 push.0 end + call.test_account::renounce_ownership + dropw dropw dropw dropw + end + "#; + + let note = NoteBuilder::new(sender, rng) + .source_manager(source_manager) + .code(script) + .build()?; + + Ok(note) +} + +// TESTS +//
================================================================================================ + +#[tokio::test] +async fn test_transfer_ownership_only_owner() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + let non_owner = AccountIdBuilder::new().build_with_seed([2; 32]); + let new_owner = AccountIdBuilder::new().build_with_seed([3; 32]); + + let account = create_ownable_account(owner, vec![])?; + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc<dyn SourceManagerSync> = Arc::new(DefaultSourceManager::default()); + let mut rng = RandomCoin::new([Felt::from(100u32); 4].into()); + let note = create_transfer_note(non_owner, new_owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[note.id()], &[])? + .with_source_manager(source_manager) + .build()?; + let result = tx.execute().await; + + assert_transaction_executor_error!(result, ERR_SENDER_NOT_OWNER); + Ok(()) +} + +#[tokio::test] +async fn test_complete_ownership_transfer() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + let new_owner = AccountIdBuilder::new().build_with_seed([2; 32]); + + let account = create_ownable_account(owner, vec![])?; + + // Step 1: transfer ownership + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc<dyn SourceManagerSync> = Arc::new(DefaultSourceManager::default()); + let mut rng = RandomCoin::new([Felt::from(100u32); 4].into()); + let transfer_note = + create_transfer_note(owner, new_owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(transfer_note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(),
&[transfer_note.id()], &[])? + .with_source_manager(Arc::clone(&source_manager)) + .build()?; + let executed = tx.execute().await?; + + let mut updated = account.clone(); + updated.apply_delta(executed.account_delta())?; + + // Verify intermediate state: owner unchanged, nominated set + assert_eq!(get_owner_from_storage(&updated)?, Some(owner)); + assert_eq!(get_nominated_owner_from_storage(&updated)?, Some(new_owner)); + + // Commit step 1 to the chain + mock_chain.add_pending_executed_transaction(&executed)?; + mock_chain.prove_next_block()?; + + // Step 2: accept ownership + let mut rng2 = RandomCoin::new([Felt::from(200u32); 4].into()); + let accept_note = create_accept_note(new_owner, &mut rng2, Arc::clone(&source_manager))?; + + let tx2 = mock_chain + .build_tx_context(updated.clone(), &[], std::slice::from_ref(&accept_note))? + .with_source_manager(source_manager) + .build()?; + let executed2 = tx2.execute().await?; + + let mut final_account = updated.clone(); + final_account.apply_delta(executed2.account_delta())?; + + assert_eq!(get_owner_from_storage(&final_account)?, Some(new_owner)); + assert_eq!(get_nominated_owner_from_storage(&final_account)?, None); + Ok(()) +} + +#[tokio::test] +async fn test_accept_ownership_only_nominated_owner() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + let new_owner = AccountIdBuilder::new().build_with_seed([2; 32]); + let wrong = AccountIdBuilder::new().build_with_seed([3; 32]); + + let account = create_ownable_account(owner, vec![])?; + + // Step 1: transfer + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RandomCoin::new([Felt::from(100u32); 4].into()); + let transfer_note = + create_transfer_note(owner, new_owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(transfer_note.clone())); + let mut mock_chain = 
builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[transfer_note.id()], &[])? + .with_source_manager(Arc::clone(&source_manager)) + .build()?; + let executed = tx.execute().await?; + + let mut updated = account.clone(); + updated.apply_delta(executed.account_delta())?; + + // Commit step 1 to the chain + mock_chain.add_pending_executed_transaction(&executed)?; + mock_chain.prove_next_block()?; + + // Step 2: wrong account tries accept + let mut rng2 = RandomCoin::new([Felt::from(200u32); 4].into()); + let accept_note = create_accept_note(wrong, &mut rng2, Arc::clone(&source_manager))?; + + let tx2 = mock_chain + .build_tx_context(updated.clone(), &[], std::slice::from_ref(&accept_note))? + .with_source_manager(source_manager) + .build()?; + let result = tx2.execute().await; + + assert_transaction_executor_error!(result, ERR_SENDER_NOT_NOMINATED_OWNER); + Ok(()) +} + +#[tokio::test] +async fn test_accept_ownership_no_nominated() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + + let account = create_ownable_account(owner, vec![])?; + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RandomCoin::new([Felt::from(200u32); 4].into()); + let accept_note = create_accept_note(owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(accept_note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[accept_note.id()], &[])? 
+ .with_source_manager(source_manager) + .build()?; + let result = tx.execute().await; + + assert_transaction_executor_error!(result, ERR_NO_NOMINATED_OWNER); + Ok(()) +} + +#[tokio::test] +async fn test_cancel_transfer() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + let new_owner = AccountIdBuilder::new().build_with_seed([2; 32]); + + let account = create_ownable_account(owner, vec![])?; + + // Step 1: transfer + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RandomCoin::new([Felt::from(100u32); 4].into()); + let transfer_note = + create_transfer_note(owner, new_owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(transfer_note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[transfer_note.id()], &[])? + .with_source_manager(Arc::clone(&source_manager)) + .build()?; + let executed = tx.execute().await?; + + let mut updated = account.clone(); + updated.apply_delta(executed.account_delta())?; + + // Commit step 1 to the chain + mock_chain.add_pending_executed_transaction(&executed)?; + mock_chain.prove_next_block()?; + + // Step 2: cancel by transferring to self (owner) + let mut rng2 = RandomCoin::new([Felt::from(200u32); 4].into()); + let cancel_note = create_transfer_note(owner, owner, &mut rng2, Arc::clone(&source_manager))?; + + let tx2 = mock_chain + .build_tx_context(updated.clone(), &[], std::slice::from_ref(&cancel_note))? 
+ .with_source_manager(source_manager) + .build()?; + let executed2 = tx2.execute().await?; + + let mut final_account = updated.clone(); + final_account.apply_delta(executed2.account_delta())?; + + assert_eq!(get_nominated_owner_from_storage(&final_account)?, None); + assert_eq!(get_owner_from_storage(&final_account)?, Some(owner)); + Ok(()) +} + +/// Tests that an owner can transfer to themselves when no nominated transfer exists. +/// This is a no-op but should succeed without errors. +#[tokio::test] +async fn test_transfer_to_self_no_nominated() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + + let account = create_ownable_account(owner, vec![])?; + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RandomCoin::new([Felt::from(100u32); 4].into()); + let note = create_transfer_note(owner, owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[note.id()], &[])? 
+ .with_source_manager(source_manager) + .build()?; + let executed = tx.execute().await?; + + let mut updated = account.clone(); + updated.apply_delta(executed.account_delta())?; + + assert_eq!(get_owner_from_storage(&updated)?, Some(owner)); + assert_eq!(get_nominated_owner_from_storage(&updated)?, None); + Ok(()) +} + +#[tokio::test] +async fn test_renounce_ownership() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + let new_owner = AccountIdBuilder::new().build_with_seed([2; 32]); + + let account = create_ownable_account(owner, vec![])?; + + // Step 1: transfer (to have nominated) + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RandomCoin::new([Felt::from(100u32); 4].into()); + let transfer_note = + create_transfer_note(owner, new_owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(transfer_note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[transfer_note.id()], &[])? + .with_source_manager(Arc::clone(&source_manager)) + .build()?; + let executed = tx.execute().await?; + + let mut updated = account.clone(); + updated.apply_delta(executed.account_delta())?; + + // Commit step 1 to the chain + mock_chain.add_pending_executed_transaction(&executed)?; + mock_chain.prove_next_block()?; + + // Step 2: renounce + let mut rng2 = RandomCoin::new([Felt::from(200u32); 4].into()); + let renounce_note = create_renounce_note(owner, &mut rng2, Arc::clone(&source_manager))?; + + let tx2 = mock_chain + .build_tx_context(updated.clone(), &[], std::slice::from_ref(&renounce_note))? 
+ .with_source_manager(source_manager) + .build()?; + let executed2 = tx2.execute().await?; + + let mut final_account = updated.clone(); + final_account.apply_delta(executed2.account_delta())?; + + assert_eq!(get_owner_from_storage(&final_account)?, None); + assert_eq!(get_nominated_owner_from_storage(&final_account)?, None); + Ok(()) +} + +/// Tests that transfer_ownership fails when the new owner account ID is invalid. +/// An invalid account ID has its suffix's lower 8 bits set to a non-zero value. +#[tokio::test] +async fn test_transfer_ownership_fails_with_invalid_account_id() -> anyhow::Result<()> { + use miden_protocol::errors::protocol::ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO; + + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + + let account = create_ownable_account(owner, vec![])?; + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let invalid_prefix = owner.prefix().as_felt(); + let invalid_suffix = Felt::new(1); + + let script = format!( + r#" + use miden::standards::access::ownable2step->test_account + begin + repeat.14 push.0 end + push.{invalid_suffix} + push.{invalid_prefix} + call.test_account::transfer_ownership + dropw dropw dropw dropw + end + "#, + ); + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RandomCoin::new([Felt::from(100u32); 4].into()); + let note = NoteBuilder::new(owner, &mut rng) + .source_manager(Arc::clone(&source_manager)) + .code(script) + .build()?; + + builder.add_output_note(RawOutputNote::Full(note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[note.id()], &[])? 
+ .with_source_manager(source_manager) + .build()?; + let result = tx.execute().await; + + assert_transaction_executor_error!( + result, + ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO + ); + Ok(()) +} diff --git a/crates/miden-testing/tests/scripts/p2id.rs b/crates/miden-testing/tests/scripts/p2id.rs index f0676bed4a..f1918a2987 100644 --- a/crates/miden-testing/tests/scripts/p2id.rs +++ b/crates/miden-testing/tests/scripts/p2id.rs @@ -1,7 +1,7 @@ use miden_protocol::account::Account; use miden_protocol::account::auth::AuthScheme; use miden_protocol::asset::{Asset, AssetVault, FungibleAsset}; -use miden_protocol::crypto::rand::RpoRandomCoin; +use miden_protocol::crypto::rand::RandomCoin; use miden_protocol::note::{NoteAttachment, NoteTag, NoteType}; use miden_protocol::testing::account_id::{ ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, @@ -10,7 +10,7 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2, ACCOUNT_ID_SENDER, }; -use miden_protocol::transaction::OutputNote; +use miden_protocol::transaction::RawOutputNote; use miden_protocol::{Felt, Word}; use miden_standards::code_builder::CodeBuilder; use miden_standards::errors::standards::ERR_P2ID_TARGET_ACCT_MISMATCH; @@ -31,12 +31,15 @@ async fn p2id_script_multiple_assets() -> anyhow::Result<()> { let mut builder = MockChain::builder(); // Create accounts - let sender_account = - builder.create_new_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; - let target_account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; - let malicious_account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let sender_account = builder.create_new_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let target_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let malicious_account = 
builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; // Create the note let note = builder.add_p2id_note( @@ -96,10 +99,12 @@ async fn prove_consume_note_with_new_account() -> anyhow::Result<()> { let mut builder = MockChain::builder(); // Create accounts - let sender_account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; - let target_account = - builder.create_new_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let sender_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let target_account = builder.create_new_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; // Create the note let note = builder.add_p2id_note( @@ -134,7 +139,7 @@ async fn prove_consume_note_with_new_account() -> anyhow::Result<()> { executed_transaction.final_account().to_commitment(), target_account_after.to_commitment() ); - prove_and_verify_transaction(executed_transaction)?; + prove_and_verify_transaction(executed_transaction).await?; Ok(()) } @@ -146,8 +151,9 @@ async fn prove_consume_multiple_notes() -> anyhow::Result<()> { let fungible_asset_2: Asset = FungibleAsset::mock(23); let mut builder = MockChain::builder(); - let mut account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let mut account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let note_1 = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into()?, account.id(), @@ -172,12 +178,12 @@ async fn prove_consume_multiple_notes() -> anyhow::Result<()> { account.apply_delta(executed_transaction.account_delta())?; let resulting_asset = account.vault().assets().next().unwrap(); if let Asset::Fungible(asset) = resulting_asset { - assert_eq!(asset.amount(), 123u64); + assert_eq!(asset.amount().inner(), 123u64); } else { panic!("Resulting 
asset should be fungible"); } - Ok(prove_and_verify_transaction(executed_transaction)?) + Ok(prove_and_verify_transaction(executed_transaction).await?) } /// Consumes two existing notes and creates two other notes in the same transaction @@ -186,7 +192,9 @@ async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { let mut builder = MockChain::builder(); let mut account = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, [FungibleAsset::mock(20)], )?; @@ -211,22 +219,25 @@ async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { let mock_chain = builder.build()?; + let asset_1 = FungibleAsset::mock(10); + let asset_2 = FungibleAsset::mock(5); + let output_note_1 = P2idNote::create( account.id(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2.try_into()?, - vec![FungibleAsset::mock(10)], + vec![asset_1], NoteType::Public, NoteAttachment::default(), - &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), + &mut RandomCoin::new(Word::from([1, 2, 3, 4u32])), )?; let output_note_2 = P2idNote::create( account.id(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into()?, - vec![FungibleAsset::mock(5)], + vec![asset_2], NoteType::Public, NoteAttachment::default(), - &mut RpoRandomCoin::new(Word::from([4, 3, 2, 1u32])), + &mut RandomCoin::new(Word::from([4, 3, 2, 1u32])), )?; let tx_script_src = &format!( @@ -238,7 +249,8 @@ async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { push.{tag_1} exec.output_note::create - push.{asset_1} + push.{ASSET_VALUE_1} + push.{ASSET_KEY_1} call.::miden::standards::wallets::basic::move_asset_to_note dropw dropw dropw dropw @@ -247,7 +259,8 @@ async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { push.{tag_2} exec.output_note::create - push.{asset_2} + push.{ASSET_VALUE_2} + push.{ASSET_KEY_2} call.::miden::standards::wallets::basic::move_asset_to_note 
dropw dropw dropw dropw end @@ -255,11 +268,13 @@ async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { recipient_1 = output_note_1.recipient().digest(), note_type_1 = NoteType::Public as u8, tag_1 = Felt::from(output_note_1.metadata().tag()), - asset_1 = Word::from(FungibleAsset::mock(10)), + ASSET_KEY_1 = asset_1.to_key_word(), + ASSET_VALUE_1 = asset_1.to_value_word(), recipient_2 = output_note_2.recipient().digest(), note_type_2 = NoteType::Public as u8, tag_2 = Felt::from(output_note_2.metadata().tag()), - asset_2 = Word::from(FungibleAsset::mock(5)), + ASSET_KEY_2 = asset_2.to_key_word(), + ASSET_VALUE_2 = asset_2.to_value_word(), ); let tx_script = CodeBuilder::default().compile_tx_script(tx_script_src)?; @@ -267,8 +282,8 @@ async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(account.id(), &[input_note_1.id(), input_note_2.id()], &[])? .extend_expected_output_notes(vec![ - OutputNote::Full(output_note_1), - OutputNote::Full(output_note_2), + RawOutputNote::Full(output_note_1), + RawOutputNote::Full(output_note_2), ]) .tx_script(tx_script) .build()?; @@ -292,11 +307,14 @@ async fn test_p2id_new_constructor() -> anyhow::Result<()> { let mut builder = MockChain::builder(); let sender_account = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, [FungibleAsset::mock(100)], )?; - let target_account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let target_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let mock_chain = builder.build()?; @@ -316,19 +334,19 @@ async fn test_p2id_new_constructor() -> anyhow::Result<()> { begin # Push inputs for p2id::new - # Inputs: [target_id_prefix, target_id_suffix, tag, note_type, SERIAL_NUM] push.{serial_num} 
push.{note_type} push.{tag} - push.{target_suffix} push.{target_prefix} - # => [target_id_prefix, target_id_suffix, tag, note_type, SERIAL_NUM] + push.{target_suffix} + # => [target_id_suffix, target_id_prefix, tag, note_type, SERIAL_NUM] exec.p2id::new # => [note_idx] # Add an asset to the created note - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.::miden::standards::wallets::basic::move_asset_to_note # Clean up stack @@ -340,7 +358,8 @@ async fn test_p2id_new_constructor() -> anyhow::Result<()> { tag = Felt::from(tag), note_type = NoteType::Public as u8, serial_num = serial_num, - asset = Word::from(FungibleAsset::mock(50)), + ASSET_KEY = FungibleAsset::mock(50).to_key_word(), + ASSET_VALUE = FungibleAsset::mock(50).to_value_word(), ); let tx_script = CodeBuilder::default().compile_tx_script(&tx_script_src)?; @@ -352,12 +371,12 @@ async fn test_p2id_new_constructor() -> anyhow::Result<()> { vec![FungibleAsset::mock(50)], NoteType::Public, NoteAttachment::default(), - &mut RpoRandomCoin::new(serial_num), + &mut RandomCoin::new(serial_num), )?; let tx_context = mock_chain .build_tx_context(sender_account.id(), &[], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(expected_output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(expected_output_note)]) .tx_script(tx_script) .build()?; diff --git a/crates/miden-testing/tests/scripts/p2ide.rs b/crates/miden-testing/tests/scripts/p2ide.rs index bfab2e3a38..67d1ea41c0 100644 --- a/crates/miden-testing/tests/scripts/p2ide.rs +++ b/crates/miden-testing/tests/scripts/p2ide.rs @@ -372,12 +372,15 @@ fn setup_p2ide_test( let mut builder = MockChain::builder(); // Create sender and target accounts - let sender_account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; - let target_account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; - let malicious_account = - builder.add_existing_wallet(Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo })?; + let sender_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let target_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let malicious_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2ide_note = builder.add_p2ide_note( sender_account.id(), diff --git a/crates/miden-testing/tests/scripts/send_note.rs b/crates/miden-testing/tests/scripts/send_note.rs index a7539068c2..9859302858 100644 --- a/crates/miden-testing/tests/scripts/send_note.rs +++ b/crates/miden-testing/tests/scripts/send_note.rs @@ -2,8 +2,8 @@ use core::slice; use std::collections::BTreeMap; use miden_protocol::account::auth::AuthScheme; -use miden_protocol::asset::{Asset, FungibleAsset}; -use miden_protocol::crypto::rand::{FeltRng, RpoRandomCoin}; +use miden_protocol::asset::{Asset, FungibleAsset, NonFungibleAsset}; +use miden_protocol::crypto::rand::{FeltRng, RandomCoin}; use miden_protocol::note::{ Note, NoteAssets, @@ -16,25 
+16,46 @@ use miden_protocol::note::{ NoteType, PartialNote, }; -use miden_protocol::transaction::OutputNote; +use miden_protocol::transaction::RawOutputNote; use miden_protocol::{Felt, Word}; use miden_standards::account::interface::{AccountInterface, AccountInterfaceExt}; use miden_standards::code_builder::CodeBuilder; +use miden_testing::utils::create_p2any_note; use miden_testing::{Auth, MockChain}; /// Tests the execution of the generated send_note transaction script in case the sending account /// has the [`BasicWallet`][wallet] interface. /// +/// This tests consumes a SPAWN note first so that the note_idx in the send_note script is not zero +/// to make sure the note_idx is correctly kept on the stack. +/// +/// The test also sends two assets to make sure the generated script deals correctly with multiple +/// assets. +/// /// [wallet]: miden_standards::account::interface::AccountComponentInterface::BasicWallet #[tokio::test] async fn test_send_note_script_basic_wallet() -> anyhow::Result<()> { - let sent_asset = FungibleAsset::mock(10); + let total_asset = FungibleAsset::mock(100); + let sent_asset0 = NonFungibleAsset::mock(&[4, 5, 6]); + + let sent_asset1 = FungibleAsset::mock(10); + let sent_asset2 = FungibleAsset::mock(40); let mut builder = MockChain::builder(); + let sender_basic_wallet_account = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, - [FungibleAsset::mock(100)], + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + [sent_asset0, total_asset], )?; + let p2any_note = create_p2any_note( + sender_basic_wallet_account.id(), + NoteType::Private, + [sent_asset2], + &mut RandomCoin::new(Word::from([1, 2, 3, 4u32])), + ); + let spawn_note = builder.add_spawn_note([&p2any_note])?; let mock_chain = builder.build()?; let sender_account_interface = AccountInterface::from_account(&sender_basic_wallet_account); @@ -45,9 +66,9 @@ async fn test_send_note_script_basic_wallet() -> 
anyhow::Result<()> { let metadata = NoteMetadata::new(sender_basic_wallet_account.id(), NoteType::Public) .with_tag(tag) .with_attachment(attachment.clone()); - let assets = NoteAssets::new(vec![sent_asset]).unwrap(); + let assets = NoteAssets::new(vec![sent_asset0, sent_asset1]).unwrap(); let note_script = CodeBuilder::default().compile_note_script("begin nop end").unwrap(); - let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); + let serial_num = RandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); let recipient = NoteRecipient::new(serial_num, note_script, NoteStorage::default()); let note = Note::new(assets.clone(), metadata, recipient); @@ -58,10 +79,10 @@ async fn test_send_note_script_basic_wallet() -> anyhow::Result<()> { .build_send_notes_script(slice::from_ref(&partial_note), Some(expiration_delta))?; let executed_transaction = mock_chain - .build_tx_context(sender_basic_wallet_account.id(), &[], &[]) + .build_tx_context(sender_basic_wallet_account.id(), &[spawn_note.id()], &[]) .expect("failed to build tx context") .tx_script(send_note_transaction_script) - .extend_expected_output_notes(vec![OutputNote::Full(note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(note.clone())]) .build()? 
.execute() .await?; @@ -73,13 +94,22 @@ async fn test_send_note_script_basic_wallet() -> anyhow::Result<()> { .removed_assets() .map(|asset| (asset.vault_key(), asset)) .collect(); - assert_eq!(removed_assets.len(), 1, "one asset should have been removed"); + assert_eq!(removed_assets.len(), 2, "two assets should have been removed"); + assert_eq!( + removed_assets.remove(&sent_asset0.vault_key()).unwrap(), + sent_asset0, + "sent asset0 should be in removed assets" + ); + assert_eq!( + removed_assets.remove(&sent_asset1.vault_key()).unwrap(), + sent_asset1.unwrap_fungible().add(sent_asset2.unwrap_fungible())?.into(), + "sent asset1 + sent_asset2 should be in removed assets" + ); assert_eq!( - removed_assets.remove(&sent_asset.vault_key()).unwrap(), - sent_asset, - "sent asset should be in removed assets" + executed_transaction.output_notes().get_note(0), + &RawOutputNote::Partial(p2any_note.into()) ); - assert_eq!(executed_transaction.output_notes().get_note(0), &OutputNote::Full(note)); + assert_eq!(executed_transaction.output_notes().get_note(1), &RawOutputNote::Full(note)); Ok(()) } @@ -92,7 +122,9 @@ async fn test_send_note_script_basic_wallet() -> anyhow::Result<()> { async fn test_send_note_script_basic_fungible_faucet() -> anyhow::Result<()> { let mut builder = MockChain::builder(); let sender_basic_fungible_faucet_account = builder.add_existing_basic_faucet( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, "POL", 200, None, @@ -111,7 +143,7 @@ async fn test_send_note_script_basic_fungible_faucet() -> anyhow::Result<()> { FungibleAsset::new(sender_basic_fungible_faucet_account.id(), 10).unwrap(), )])?; let note_script = CodeBuilder::default().compile_note_script("begin nop end").unwrap(); - let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); + let serial_num = RandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); let recipient = 
NoteRecipient::new(serial_num, note_script, NoteStorage::default()); let note = Note::new(assets.clone(), metadata, recipient); @@ -125,12 +157,12 @@ async fn test_send_note_script_basic_fungible_faucet() -> anyhow::Result<()> { .build_tx_context(sender_basic_fungible_faucet_account.id(), &[], &[]) .expect("failed to build tx context") .tx_script(send_note_transaction_script) - .extend_expected_output_notes(vec![OutputNote::Full(note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(note.clone())]) .build()? .execute() .await?; - assert_eq!(executed_transaction.output_notes().get_note(0), &OutputNote::Full(note)); + assert_eq!(executed_transaction.output_notes().get_note(0), &RawOutputNote::Full(note)); Ok(()) } diff --git a/crates/miden-testing/tests/scripts/swap.rs b/crates/miden-testing/tests/scripts/swap.rs index 9fd4296393..0cd95695a9 100644 --- a/crates/miden-testing/tests/scripts/swap.rs +++ b/crates/miden-testing/tests/scripts/swap.rs @@ -1,18 +1,17 @@ use anyhow::Context; +use miden_protocol::Felt; use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{Account, AccountId, AccountStorageMode, AccountType}; use miden_protocol::asset::{Asset, FungibleAsset, NonFungibleAsset}; -use miden_protocol::errors::NoteError; -use miden_protocol::note::{Note, NoteAssets, NoteDetails, NoteMetadata, NoteTag, NoteType}; +use miden_protocol::note::{Note, NoteDetails, NoteType}; use miden_protocol::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1, AccountIdBuilder, }; -use miden_protocol::transaction::OutputNote; -use miden_protocol::{Felt, Word}; +use miden_protocol::transaction::RawOutputNote; use miden_standards::code_builder::CodeBuilder; -use miden_standards::note::P2idNoteStorage; +use miden_testing::utils::create_p2id_note_exact; use miden_testing::{Auth, MockChain}; use crate::prove_and_verify_transaction; @@ -41,7 +40,8 @@ pub async fn prove_send_swap_note() -> anyhow::Result<()> { 
push.{tag} exec.output_note::create - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.::miden::standards::wallets::basic::move_asset_to_note dropw dropw dropw dropw end @@ -49,7 +49,8 @@ pub async fn prove_send_swap_note() -> anyhow::Result<()> { recipient = swap_note.recipient().digest(), note_type = NoteType::Public as u8, tag = Felt::from(swap_note.metadata().tag()), - asset = Word::from(offered_asset), + ASSET_KEY = offered_asset.to_key_word(), + ASSET_VALUE = offered_asset.to_value_word(), ); let tx_script = CodeBuilder::default().compile_tx_script(tx_script_src)?; @@ -58,7 +59,7 @@ pub async fn prove_send_swap_note() -> anyhow::Result<()> { .build_tx_context(sender_account.id(), &[], &[]) .context("failed to build tx context")? .tx_script(tx_script) - .extend_expected_output_notes(vec![OutputNote::Full(swap_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(swap_note.clone())]) .build()? .execute() .await?; @@ -80,8 +81,8 @@ pub async fn prove_send_swap_note() -> anyhow::Result<()> { ); let swap_output_note = create_swap_note_tx.output_notes().iter().next().unwrap(); - assert_eq!(swap_output_note.assets().unwrap().iter().next().unwrap(), &offered_asset); - assert!(prove_and_verify_transaction(create_swap_note_tx).is_ok()); + assert_eq!(swap_output_note.assets().iter().next().unwrap(), &offered_asset); + assert!(prove_and_verify_transaction(create_swap_note_tx).await.is_ok()); Ok(()) } @@ -119,7 +120,7 @@ async fn consume_swap_note_private_payback_note() -> anyhow::Result<()> { let output_payback_note = consume_swap_note_tx.output_notes().iter().next().unwrap().clone(); assert!(output_payback_note.id() == payback_note.id()); - assert_eq!(output_payback_note.assets().unwrap().iter().next().unwrap(), &requested_asset); + assert_eq!(output_payback_note.assets().iter().next().unwrap(), &requested_asset); assert!(target_account.vault().assets().count() == 1); assert!(target_account.vault().assets().any(|asset| asset == 
offered_asset)); @@ -147,9 +148,11 @@ async fn consume_swap_note_private_payback_note() -> anyhow::Result<()> { assert!(sender_account.vault().assets().any(|asset| asset == requested_asset)); prove_and_verify_transaction(consume_swap_note_tx) + .await .context("failed to prove/verify consume_swap_note_tx")?; prove_and_verify_transaction(consume_payback_tx) + .await .context("failed to prove/verify consume_payback_tx")?; Ok(()) @@ -188,7 +191,7 @@ async fn consume_swap_note_public_payback_note() -> anyhow::Result<()> { let consume_swap_note_tx = mock_chain .build_tx_context(target_account.id(), &[swap_note.id()], &[]) .context("failed to build tx context")? - .extend_expected_output_notes(vec![OutputNote::Full(payback_p2id_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(payback_p2id_note)]) .build()? .execute() .await?; @@ -197,7 +200,7 @@ async fn consume_swap_note_public_payback_note() -> anyhow::Result<()> { let output_payback_note = consume_swap_note_tx.output_notes().iter().next().unwrap().clone(); assert!(output_payback_note.id() == payback_note.id()); - assert_eq!(output_payback_note.assets().unwrap().iter().next().unwrap(), &requested_asset); + assert_eq!(output_payback_note.assets().iter().next().unwrap(), &requested_asset); assert!(target_account.vault().assets().count() == 1); assert!(target_account.vault().assets().any(|asset| asset == offered_asset)); @@ -239,7 +242,9 @@ async fn settle_coincidence_of_wants() -> anyhow::Result<()> { // CREATE ACCOUNT 1: Has asset A, wants asset B // -------------------------------------------------------------------------------------------- let account_1 = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, vec![asset_a], )?; @@ -250,7 +255,9 @@ async fn settle_coincidence_of_wants() -> anyhow::Result<()> { // CREATE ACCOUNT 2: Has asset B, wants asset A // 
-------------------------------------------------------------------------------------------- let account_2 = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, vec![asset_b], )?; let (swap_note_2, payback_note_2) = @@ -261,7 +268,9 @@ async fn settle_coincidence_of_wants() -> anyhow::Result<()> { // TODO: matcher account should be able to fill both SWAP notes without holding assets A & B let matcher_account = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, vec![asset_a, asset_b], )?; // Initial matching account balance should have two assets. @@ -293,10 +302,10 @@ async fn settle_coincidence_of_wants() -> anyhow::Result<()> { .expect("Payback note 2 not found"); // Verify payback note 1 contains exactly the initially requested asset B for account 1 - assert_eq!(output_payback_1.assets().unwrap().iter().next().unwrap(), &asset_b); + assert_eq!(output_payback_1.assets().iter().next().unwrap(), &asset_b); // Verify payback note 2 contains exactly the initially requested asset A for account 2 - assert_eq!(output_payback_2.assets().unwrap().iter().next().unwrap(), &asset_a); + assert_eq!(output_payback_2.assets().iter().next().unwrap(), &asset_a); Ok(()) } @@ -322,11 +331,15 @@ fn setup_swap_test(payback_note_type: NoteType) -> anyhow::Result let mut builder = MockChain::builder(); let sender_account = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, vec![offered_asset], )?; let target_account = builder.add_existing_wallet_with_assets( - Auth::BasicAuth { auth_scheme: AuthScheme::Falcon512Rpo }, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, vec![requested_asset], )?; @@ -334,7 
+347,7 @@ fn setup_swap_test(payback_note_type: NoteType) -> anyhow::Result .add_swap_note(sender_account.id(), offered_asset, requested_asset, payback_note_type) .unwrap(); - builder.add_output_note(OutputNote::Full(swap_note.clone())); + builder.add_output_note(RawOutputNote::Full(swap_note.clone())); let mock_chain = builder.build()?; Ok(SwapTestSetup { @@ -347,21 +360,3 @@ fn setup_swap_test(payback_note_type: NoteType) -> anyhow::Result payback_note, }) } - -/// Generates a P2ID note - Pay-to-ID note with an exact serial number -pub fn create_p2id_note_exact( - sender: AccountId, - target: AccountId, - assets: Vec, - note_type: NoteType, - serial_num: Word, -) -> Result { - let recipient = P2idNoteStorage::new(target).into_recipient(serial_num); - - let tag = NoteTag::with_account_target(target); - - let metadata = NoteMetadata::new(sender, note_type).with_tag(tag); - let vault = NoteAssets::new(assets)?; - - Ok(Note::new(vault, metadata, recipient)) -} diff --git a/crates/miden-testing/tests/wallet/mod.rs b/crates/miden-testing/tests/wallet/mod.rs index 6c59b491a9..0fff293ddf 100644 --- a/crates/miden-testing/tests/wallet/mod.rs +++ b/crates/miden-testing/tests/wallet/mod.rs @@ -16,8 +16,8 @@ fn wallet_creation() { let seed = [0_u8; 32]; let mut rng = ChaCha20Rng::from_seed(seed); - let sec_key = AuthSecretKey::new_falcon512_rpo_with_rng(&mut rng); - let auth_scheme = auth::AuthScheme::Falcon512Rpo; + let sec_key = AuthSecretKey::new_falcon512_poseidon2_with_rng(&mut rng); + let auth_scheme = auth::AuthScheme::Falcon512Poseidon2; let pub_key = sec_key.public_key().to_commitment(); let auth_method: AuthMethod = AuthMethod::SingleSig { approver: (pub_key, auth_scheme) }; diff --git a/crates/miden-tx/src/auth/tx_authenticator.rs b/crates/miden-tx/src/auth/tx_authenticator.rs index ccd5f73b30..877d29aa61 100644 --- a/crates/miden-tx/src/auth/tx_authenticator.rs +++ b/crates/miden-tx/src/auth/tx_authenticator.rs @@ -11,7 +11,13 @@ use 
miden_protocol::transaction::TransactionSummary; use miden_protocol::{Felt, Hasher, Word}; use crate::errors::AuthenticationError; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // SIGNATURE DATA // ================================================================================================ @@ -284,14 +290,14 @@ impl TransactionAuthenticator for () { #[cfg(test)] mod test { use miden_protocol::account::auth::AuthSecretKey; - use miden_protocol::utils::{Deserializable, Serializable}; + use miden_protocol::utils::serde::{Deserializable, Serializable}; use miden_protocol::{Felt, Word}; use super::SigningInputs; #[test] fn serialize_auth_key() { - let auth_key = AuthSecretKey::new_falcon512_rpo(); + let auth_key = AuthSecretKey::new_falcon512_poseidon2(); let serialized = auth_key.to_bytes(); let deserialized = AuthSecretKey::read_from_bytes(&serialized).unwrap(); diff --git a/crates/miden-tx/src/errors/mod.rs b/crates/miden-tx/src/errors/mod.rs index 4eae85f935..f9727fcae2 100644 --- a/crates/miden-tx/src/errors/mod.rs +++ b/crates/miden-tx/src/errors/mod.rs @@ -3,9 +3,10 @@ use alloc::string::String; use alloc::vec::Vec; use core::error::Error; -use miden_processor::{DeserializationError, ExecutionError}; -use miden_protocol::account::AccountId; +use miden_processor::ExecutionError; +use miden_processor::serde::DeserializationError; use miden_protocol::account::auth::PublicKeyCommitment; +use miden_protocol::account::{AccountId, StorageMapKey}; use miden_protocol::assembly::diagnostics::reporting::PrintDiagnostic; use miden_protocol::asset::AssetVaultKey; use miden_protocol::block::BlockNumber; @@ -15,6 +16,7 @@ use miden_protocol::errors::{ AccountError, AssetError, NoteError, + OutputNoteError, ProvenTransactionError, TransactionInputError, TransactionInputsExtractionError, @@ -148,6 +150,8 @@ 
pub enum TransactionProverError { RemoveFeeAssetFromDelta(#[source] AccountDeltaError), #[error("failed to construct transaction outputs")] TransactionOutputConstructionFailed(#[source] TransactionOutputError), + #[error("failed to shrink output note")] + OutputNoteShrinkFailed(#[source] OutputNoteError), #[error("failed to build proven transaction")] ProvenTransactionBuildFailed(#[source] ProvenTransactionError), // Print the diagnostic directly instead of returning the source error. In the source error @@ -286,7 +290,7 @@ pub enum TransactionKernelError { )] GetStorageMapWitness { map_root: Word, - map_key: Word, + map_key: StorageMapKey, // thiserror will return this when calling Error::source on TransactionKernelError. source: DataStoreError, }, diff --git a/crates/miden-tx/src/executor/data_store.rs b/crates/miden-tx/src/executor/data_store.rs index 79454eb9e6..e0525d4914 100644 --- a/crates/miden-tx/src/executor/data_store.rs +++ b/crates/miden-tx/src/executor/data_store.rs @@ -2,7 +2,7 @@ use alloc::collections::BTreeSet; use alloc::vec::Vec; use miden_processor::{FutureMaybeSend, MastForestStore, Word}; -use miden_protocol::account::{AccountId, PartialAccount, StorageMapWitness}; +use miden_protocol::account::{AccountId, PartialAccount, StorageMapKey, StorageMapWitness}; use miden_protocol::asset::{AssetVaultKey, AssetWitness}; use miden_protocol::block::{BlockHeader, BlockNumber}; use miden_protocol::note::NoteScript; @@ -67,7 +67,7 @@ pub trait DataStore: MastForestStore { &self, account_id: AccountId, map_root: Word, - map_key: Word, + map_key: StorageMapKey, ) -> impl FutureMaybeSend>; /// Returns a note script with the specified root, or `None` if not found. 
diff --git a/crates/miden-tx/src/executor/exec_host.rs b/crates/miden-tx/src/executor/exec_host.rs index 640b5ee908..66bbbbdb7a 100644 --- a/crates/miden-tx/src/executor/exec_host.rs +++ b/crates/miden-tx/src/executor/exec_host.rs @@ -3,21 +3,17 @@ use alloc::collections::{BTreeMap, BTreeSet}; use alloc::sync::Arc; use alloc::vec::Vec; -use miden_processor::{ - AdviceMutation, - AsyncHost, - BaseHost, - EventError, - FutureMaybeSend, - MastForest, - ProcessState, -}; +use miden_processor::advice::AdviceMutation; +use miden_processor::event::EventError; +use miden_processor::mast::MastForest; +use miden_processor::{FutureMaybeSend, Host, ProcessorState}; use miden_protocol::account::auth::PublicKeyCommitment; use miden_protocol::account::{ AccountCode, AccountDelta, AccountId, PartialAccount, + StorageMapKey, StorageSlotId, StorageSlotName, }; @@ -30,11 +26,11 @@ use miden_protocol::note::{NoteMetadata, NoteRecipient, NoteScript, NoteStorage} use miden_protocol::transaction::{ InputNote, InputNotes, - OutputNote, + RawOutputNote, TransactionAdviceInputs, TransactionSummary, }; -use miden_protocol::vm::AdviceMap; +use miden_protocol::vm::{AdviceMap, EventId, EventName}; use miden_protocol::{Felt, Hasher, Word}; use miden_standards::note::StandardNote; @@ -240,7 +236,7 @@ where .account_delta_tracker() .vault_delta() .fungible() - .amount(&initial_fee_asset.faucet_id()) + .amount(&initial_fee_asset.vault_key()) .unwrap_or(0); // SAFETY: Initial native asset faucet ID should be a fungible faucet and amount should @@ -267,8 +263,8 @@ where // Return an error if the balance in the account does not cover the fee. 
if current_fee_asset.amount() < fee_asset.amount() { return Err(TransactionKernelError::InsufficientFee { - account_balance: current_fee_asset.amount(), - tx_fee: fee_asset.amount(), + account_balance: current_fee_asset.amount().inner(), + tx_fee: fee_asset.amount().inner(), }); } @@ -283,7 +279,7 @@ where &self, active_account_id: AccountId, map_root: Word, - map_key: Word, + map_key: StorageMapKey, ) -> Result, TransactionKernelError> { let storage_map_witness = self .base_host @@ -303,7 +299,7 @@ where let smt_proof = SmtProof::from(storage_map_witness); let map_ext = AdviceMutation::extend_map(AdviceMap::from_iter([( smt_proof.leaf().hash(), - smt_proof.leaf().to_elements(), + smt_proof.leaf().to_elements().collect::>(), )])); Ok(vec![merkle_store_ext, map_ext]) @@ -445,7 +441,7 @@ where ) -> ( AccountDelta, InputNotes, - Vec, + Vec, Vec, BTreeMap>, TransactionProgress, @@ -468,10 +464,10 @@ where // HOST IMPLEMENTATION // ================================================================================================ -impl BaseHost for TransactionExecutorHost<'_, '_, STORE, AUTH> +impl Host for TransactionExecutorHost<'_, '_, STORE, AUTH> where - STORE: DataStore, - AUTH: TransactionAuthenticator, + STORE: DataStore + Sync, + AUTH: TransactionAuthenticator + Sync, { fn get_label_and_source_file( &self, @@ -482,13 +478,7 @@ where let span = source_manager.location_to_span(location.clone()).unwrap_or_default(); (span, maybe_file) } -} -impl AsyncHost for TransactionExecutorHost<'_, '_, STORE, AUTH> -where - STORE: DataStore + Sync, - AUTH: TransactionAuthenticator + Sync, -{ fn get_mast_forest(&self, node_digest: &Word) -> impl FutureMaybeSend>> { let mast_forest = self.base_host.get_mast_forest(node_digest); async move { mast_forest } @@ -496,7 +486,7 @@ where fn on_event( &mut self, - process: &ProcessState, + process: &ProcessorState, ) -> impl FutureMaybeSend, EventError>> { let core_lib_event_result = self.base_host.handle_core_lib_events(process); @@ 
-702,6 +692,10 @@ where result.map_err(EventError::from) } } + + fn resolve_event(&self, event_id: EventId) -> Option<&EventName> { + self.base_host.resolve_event(event_id) + } } // HELPER FUNCTIONS @@ -716,7 +710,7 @@ fn asset_witness_to_advice_mutation(asset_witness: AssetWitness) -> [AdviceMutat let smt_proof = SmtProof::from(asset_witness); let map_ext = AdviceMutation::extend_map(AdviceMap::from_iter([( smt_proof.leaf().hash(), - smt_proof.leaf().to_elements(), + smt_proof.leaf().to_elements().collect::>(), )])); [merkle_store_ext, map_ext] diff --git a/crates/miden-tx/src/executor/mod.rs b/crates/miden-tx/src/executor/mod.rs index 1dc2378404..9cfe1dde14 100644 --- a/crates/miden-tx/src/executor/mod.rs +++ b/crates/miden-tx/src/executor/mod.rs @@ -1,8 +1,9 @@ use alloc::collections::BTreeSet; use alloc::sync::Arc; +use core::marker::PhantomData; -use miden_processor::fast::FastProcessor; -use miden_processor::{AdviceInputs, ExecutionError, StackInputs}; +use miden_processor::advice::AdviceInputs; +use miden_processor::{ExecutionError, FastProcessor, StackInputs}; pub use miden_processor::{ExecutionOptions, MastForestStore}; use miden_protocol::account::AccountId; use miden_protocol::assembly::DefaultSourceManager; @@ -40,6 +41,9 @@ pub use notes_checker::{ NoteConsumptionInfo, }; +mod program_executor; +pub use program_executor::ProgramExecutor; + // TRANSACTION EXECUTOR // ================================================================================================ @@ -52,11 +56,18 @@ pub use notes_checker::{ /// The transaction executor uses dynamic dispatch with trait objects for the [DataStore] and /// [TransactionAuthenticator], allowing it to be used with different backend implementations. /// At the moment of execution, the [DataStore] is expected to provide all required MAST nodes. 
-pub struct TransactionExecutor<'store, 'auth, STORE: 'store, AUTH: 'auth> { +pub struct TransactionExecutor< + 'store, + 'auth, + STORE: 'store, + AUTH: 'auth, + EXEC: ProgramExecutor = FastProcessor, +> { data_store: &'store STORE, authenticator: Option<&'auth AUTH>, source_manager: Arc, exec_options: ExecutionOptions, + _executor: PhantomData, } impl<'store, 'auth, STORE, AUTH> TransactionExecutor<'store, 'auth, STORE, AUTH> @@ -71,19 +82,48 @@ where /// /// The created executor will not have the authenticator or source manager set, and tracing and /// debug mode will be turned off. + /// + /// By default, the executor uses [`FastProcessor`](miden_processor::FastProcessor) for program + /// execution. Use [`with_program_executor`](Self::with_program_executor) to plug in a + /// different execution engine. pub fn new(data_store: &'store STORE) -> Self { const _: () = assert!(MIN_TX_EXECUTION_CYCLES <= MAX_TX_EXECUTION_CYCLES); - TransactionExecutor { + Self { data_store, authenticator: None, source_manager: Arc::new(DefaultSourceManager::default()), exec_options: ExecutionOptions::new( Some(MAX_TX_EXECUTION_CYCLES), MIN_TX_EXECUTION_CYCLES, + ExecutionOptions::DEFAULT_CORE_TRACE_FRAGMENT_SIZE, false, false, ) .expect("Must not fail while max cycles is more than min trace length"), + _executor: PhantomData, + } + } +} + +impl<'store, 'auth, STORE, AUTH, EXEC> TransactionExecutor<'store, 'auth, STORE, AUTH, EXEC> +where + STORE: DataStore + 'store + Sync, + AUTH: TransactionAuthenticator + 'auth + Sync, + EXEC: ProgramExecutor, +{ + /// Replaces the transaction program executor with a different implementation. + /// + /// This allows plugging in alternative execution engines while preserving the rest of the + /// transaction executor configuration. 
+ pub fn with_program_executor( + self, + ) -> TransactionExecutor<'store, 'auth, STORE, AUTH, EXEC2> { + TransactionExecutor::<'store, 'auth, STORE, AUTH, EXEC2> { + data_store: self.data_store, + authenticator: self.authenticator, + source_manager: self.source_manager, + exec_options: self.exec_options, + _executor: PhantomData, } } @@ -148,7 +188,7 @@ where /// stages of transaction execution take. #[must_use] pub fn with_tracing(mut self) -> Self { - self.exec_options = self.exec_options.with_tracing(); + self.exec_options = self.exec_options.with_tracing(true); self } @@ -185,13 +225,7 @@ where // instantiate the processor in debug mode only when debug mode is specified via execution // options; this is important because in debug mode execution is almost 100x slower - // TODO: the processor does not yet respect other execution options (e.g., max cycles); - // this will be fixed in v0.21 release of the VM - let processor = if self.exec_options.enable_debugging() { - FastProcessor::new_debug(stack_inputs.as_slice(), advice_inputs) - } else { - FastProcessor::new_with_advice_inputs(stack_inputs.as_slice(), advice_inputs) - }; + let processor = EXEC::new(stack_inputs, advice_inputs, self.exec_options); let output = processor .execute(&TransactionKernel::main(), &mut host) @@ -237,8 +271,7 @@ where let (mut host, stack_inputs, advice_inputs) = self.prepare_transaction(&tx_inputs).await?; - let processor = - FastProcessor::new_with_advice_inputs(stack_inputs.as_slice(), advice_inputs); + let processor = EXEC::new(stack_inputs, advice_inputs, self.exec_options); let output = processor .execute(&TransactionKernel::tx_script_main(), &mut host) .await @@ -274,7 +307,7 @@ where let native_account_vault_root = account.vault().root(); let fee_asset_vault_key = - AssetVaultKey::from_account_id(block_header.fee_parameters().native_asset_id()) + AssetVaultKey::new_fungible(block_header.fee_parameters().native_asset_id()) .expect("fee asset should be a fungible asset"); let 
mut tx_inputs = TransactionInputs::new(account, block_header, blockchain, input_notes) @@ -316,14 +349,6 @@ where TransactionExecutorError, > { let (stack_inputs, tx_advice_inputs) = TransactionKernel::prepare_inputs(tx_inputs); - - // This reverses the stack inputs (even though it doesn't look like it does) because the - // fast processor expects the reverse order. - // - // Once we use the FastProcessor for execution and proving, we can change the way these - // inputs are constructed in TransactionKernel::prepare_inputs. - let stack_inputs = StackInputs::new(stack_inputs.iter().copied().collect()).unwrap(); - let input_notes = tx_inputs.input_notes(); let script_mast_store = ScriptMastForestStore::new( @@ -339,14 +364,14 @@ where let initial_fee_asset_balance = { let vault_root = tx_inputs.account().vault().root(); let native_asset_id = tx_inputs.block_header().fee_parameters().native_asset_id(); - let fee_asset_vault_key = AssetVaultKey::from_account_id(native_asset_id) + let fee_asset_vault_key = AssetVaultKey::new_fungible(native_asset_id) .expect("fee asset should be a fungible asset"); let fee_asset = tx_inputs .read_vault_asset(vault_root, fee_asset_vault_key) .map_err(TransactionExecutorError::FeeAssetRetrievalFailed)?; match fee_asset { - Some(Asset::Fungible(fee_asset)) => fee_asset.amount(), + Some(Asset::Fungible(fee_asset)) => fee_asset.amount().inner(), Some(Asset::NonFungible(_)) => { return Err(TransactionExecutorError::FeeAssetMustBeFungible); }, diff --git a/crates/miden-tx/src/executor/notes_checker.rs b/crates/miden-tx/src/executor/notes_checker.rs index 188839496c..69b71869e8 100644 --- a/crates/miden-tx/src/executor/notes_checker.rs +++ b/crates/miden-tx/src/executor/notes_checker.rs @@ -1,7 +1,7 @@ use alloc::collections::BTreeMap; use alloc::vec::Vec; -use miden_processor::fast::FastProcessor; +use miden_processor::advice::AdviceInputs; use miden_protocol::account::AccountId; use miden_protocol::block::BlockNumber; use 
miden_protocol::note::Note; @@ -12,10 +12,9 @@ use miden_protocol::transaction::{ TransactionInputs, TransactionKernel, }; -use miden_prover::AdviceInputs; use miden_standards::note::{NoteConsumptionStatus, StandardNote}; -use super::TransactionExecutor; +use super::{ProgramExecutor, TransactionExecutor}; use crate::auth::TransactionAuthenticator; use crate::errors::TransactionCheckerError; use crate::executor::map_execution_error; @@ -73,15 +72,18 @@ impl NoteConsumptionInfo { /// The check is performed using the [NoteConsumptionChecker::check_notes_consumability] procedure. /// Essentially runs the transaction to make sure that provided input notes could be consumed by the /// account. -pub struct NoteConsumptionChecker<'a, STORE, AUTH>(&'a TransactionExecutor<'a, 'a, STORE, AUTH>); +pub struct NoteConsumptionChecker<'a, STORE, AUTH, EXEC: ProgramExecutor>( + &'a TransactionExecutor<'a, 'a, STORE, AUTH, EXEC>, +); -impl<'a, STORE, AUTH> NoteConsumptionChecker<'a, STORE, AUTH> +impl<'a, STORE, AUTH, EXEC> NoteConsumptionChecker<'a, STORE, AUTH, EXEC> where STORE: DataStore + Sync, AUTH: TransactionAuthenticator + Sync, + EXEC: ProgramExecutor, { /// Creates a new [`NoteConsumptionChecker`] instance with the given transaction executor. 
- pub fn new(tx_executor: &'a TransactionExecutor<'a, 'a, STORE, AUTH>) -> Self { + pub fn new(tx_executor: &'a TransactionExecutor<'a, 'a, STORE, AUTH, EXEC>) -> Self { NoteConsumptionChecker(tx_executor) } @@ -337,8 +339,7 @@ where .await .map_err(TransactionCheckerError::TransactionPreparation)?; - let processor = - FastProcessor::new_with_advice_inputs(stack_inputs.as_slice(), advice_inputs); + let processor = EXEC::new(stack_inputs, advice_inputs, self.0.exec_options); let result = processor .execute(&TransactionKernel::main(), &mut host) .await diff --git a/crates/miden-tx/src/executor/program_executor.rs b/crates/miden-tx/src/executor/program_executor.rs new file mode 100644 index 0000000000..f4dc8eaa1d --- /dev/null +++ b/crates/miden-tx/src/executor/program_executor.rs @@ -0,0 +1,52 @@ +use miden_processor::advice::AdviceInputs; +use miden_processor::{ + ExecutionError, + ExecutionOptions, + ExecutionOutput, + FastProcessor, + FutureMaybeSend, + Host, + Program, + StackInputs, +}; + +/// A transaction-scoped program executor used by +/// [`TransactionExecutor`](super::TransactionExecutor). +/// +/// TODO: Move this trait into `miden-vm` once the executor boundary is +/// consolidated there. +pub trait ProgramExecutor { + /// Create a new executor configured with the provided transaction inputs and options. + fn new( + stack_inputs: StackInputs, + advice_inputs: AdviceInputs, + options: ExecutionOptions, + ) -> Self + where + Self: Sized; + + /// Execute the provided program against the given host. 
+ fn execute( + self, + program: &Program, + host: &mut H, + ) -> impl FutureMaybeSend>; +} + +impl ProgramExecutor for FastProcessor { + fn new( + stack_inputs: StackInputs, + advice_inputs: AdviceInputs, + options: ExecutionOptions, + ) -> Self { + FastProcessor::new_with_options(stack_inputs, advice_inputs, options) + } + + fn execute( + self, + program: &Program, + host: &mut H, + ) -> impl FutureMaybeSend> { + FastProcessor::execute(self, program, host) + } +} diff --git a/crates/miden-tx/src/host/account_delta_tracker.rs b/crates/miden-tx/src/host/account_delta_tracker.rs index f62e7996e8..889470b735 100644 --- a/crates/miden-tx/src/host/account_delta_tracker.rs +++ b/crates/miden-tx/src/host/account_delta_tracker.rs @@ -1,3 +1,4 @@ +use miden_protocol::Felt; use miden_protocol::account::{ AccountCode, AccountDelta, @@ -5,7 +6,6 @@ use miden_protocol::account::{ AccountVaultDelta, PartialAccount, }; -use miden_protocol::{Felt, FieldElement, ZERO}; use crate::host::storage_delta_tracker::StorageDeltaTracker; @@ -44,7 +44,7 @@ impl AccountDeltaTracker { storage: StorageDeltaTracker::new(account), vault: AccountVaultDelta::default(), code, - nonce_delta: ZERO, + nonce_delta: Felt::ZERO, } } diff --git a/crates/miden-tx/src/host/kernel_process.rs b/crates/miden-tx/src/host/kernel_process.rs index d94e88a7a1..dc8ec218aa 100644 --- a/crates/miden-tx/src/host/kernel_process.rs +++ b/crates/miden-tx/src/host/kernel_process.rs @@ -1,4 +1,4 @@ -use miden_processor::{ExecutionError, Felt, ProcessState}; +use miden_processor::{ExecutionError, Felt, ProcessorState}; use miden_protocol::Word; use miden_protocol::account::{AccountId, StorageSlotId, StorageSlotType}; use miden_protocol::note::{NoteId, NoteStorage}; @@ -67,20 +67,21 @@ pub(super) trait TransactionKernelProcess { ) -> Result; } -impl<'a> TransactionKernelProcess for ProcessState<'a> { +impl<'a> TransactionKernelProcess for ProcessorState<'a> { fn get_active_account_ptr(&self) -> Result { let 
account_stack_top_ptr = self.get_mem_value(self.ctx(), ACCOUNT_STACK_TOP_PTR).ok_or_else(|| { TransactionKernelError::other("account stack top ptr should be initialized") })?; - let account_stack_top_ptr = u32::try_from(account_stack_top_ptr).map_err(|_| { - TransactionKernelError::other("account stack top ptr should fit into a u32") - })?; + let account_stack_top_ptr = u32::try_from(account_stack_top_ptr.as_canonical_u64()) + .map_err(|_| { + TransactionKernelError::other("account stack top ptr should fit into a u32") + })?; let active_account_ptr = self .get_mem_value(self.ctx(), account_stack_top_ptr) .ok_or_else(|| TransactionKernelError::other("account id should be initialized"))?; - u32::try_from(active_account_ptr) + u32::try_from(active_account_ptr.as_canonical_u64()) .map_err(|_| TransactionKernelError::other("active account ptr should fit into a u32")) } @@ -95,10 +96,10 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { TransactionKernelError::other("active account id should be initialized") })?; - AccountId::try_from([ - active_account_id_and_nonce[ACCT_ID_PREFIX_IDX], + AccountId::try_from_elements( active_account_id_and_nonce[ACCT_ID_SUFFIX_IDX], - ]) + active_account_id_and_nonce[ACCT_ID_PREFIX_IDX], + ) .map_err(|_| { TransactionKernelError::other( "active account id ptr should point to a valid account ID", @@ -135,14 +136,14 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { NATIVE_NUM_ACCT_STORAGE_SLOTS_PTR, ))?; - Ok(num_storage_slots_felt.as_int()) + Ok(num_storage_slots_felt.as_canonical_u64()) } fn get_num_output_notes(&self) -> u64 { // Read the number from memory or default to 0 if the location hasn't been accessed // previously (e.g. when no notes have been created yet). 
self.get_mem_value(self.ctx(), NUM_OUTPUT_NOTES_PTR) - .map(|num_output_notes| num_output_notes.as_int()) + .map(|num_output_notes| num_output_notes.as_canonical_u64()) .unwrap_or(0) } @@ -160,7 +161,7 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { None => return Ok(None), }; // convert note address into u32 - let note_address = u32::try_from(note_address_felt).map_err(|_| { + let note_address = u32::try_from(note_address_felt.as_canonical_u64()).map_err(|_| { TransactionKernelError::other(format!( "failed to convert {note_address_felt} into a memory address (u32)" )) @@ -174,7 +175,7 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { .map_err(|err| { TransactionKernelError::other_with_source( "failed to read note address", - ExecutionError::MemoryError(err), + ExecutionError::MemoryErrorNoCtx(err), ) })? .map(NoteId::from_raw)) @@ -183,7 +184,7 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { /// Returns the vault root at the provided pointer. fn get_vault_root(&self, vault_root_ptr: Felt) -> Result { - let vault_root_ptr = u32::try_from(vault_root_ptr).map_err(|_err| { + let vault_root_ptr = u32::try_from(vault_root_ptr.as_canonical_u64()).map_err(|_err| { TransactionKernelError::other(format!( "vault root ptr should fit into a u32, but was {vault_root_ptr}" )) @@ -205,7 +206,7 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { &self, slot_ptr: Felt, ) -> Result<(StorageSlotId, StorageSlotType, Word), TransactionKernelError> { - let slot_ptr = u32::try_from(slot_ptr).map_err(|_err| { + let slot_ptr = u32::try_from(slot_ptr.as_canonical_u64()).map_err(|_err| { TransactionKernelError::other(format!( "slot ptr should fit into a u32, but was {slot_ptr}" )) @@ -239,7 +240,7 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { })?; let slot_type = slot_metadata[ACCT_STORAGE_SLOT_TYPE_OFFSET as usize]; - let slot_type = u8::try_from(slot_type).map_err(|err| { + let slot_type = 
u8::try_from(slot_type.as_canonical_u64()).map_err(|err| { TransactionKernelError::other(format!("failed to convert {slot_type} into u8: {err}")) })?; let slot_type = StorageSlotType::try_from(slot_type).map_err(|err| { @@ -324,7 +325,7 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { /// Returns an error if the key is not present in the advice map or if the data is malformed /// (not exactly 8 elements). fn read_double_word_from_adv_map( - process: &ProcessState, + process: &ProcessorState, key: Word, ) -> Result<(Word, Word), TransactionKernelError> { let data = process diff --git a/crates/miden-tx/src/host/link_map.rs b/crates/miden-tx/src/host/link_map.rs index 5024c86dfd..41353a30d1 100644 --- a/crates/miden-tx/src/host/link_map.rs +++ b/crates/miden-tx/src/host/link_map.rs @@ -1,8 +1,8 @@ use alloc::vec::Vec; use core::cmp::Ordering; -use miden_processor::fast::ExecutionOutput; -use miden_processor::{AdviceMutation, ContextId, ProcessState}; +use miden_processor::advice::AdviceMutation; +use miden_processor::{ContextId, ExecutionOutput, ProcessorState}; use miden_protocol::{Felt, LexicographicWord, Word, ZERO}; // LINK MAP @@ -30,7 +30,8 @@ impl<'process> LinkMap<'process> { /// Creates a new link map from the provided map_ptr in the provided process. pub fn new(map_ptr: Felt, mem: &'process MemoryViewer<'process>) -> Self { - let map_ptr: u32 = map_ptr.try_into().expect("map_ptr must be a valid u32"); + let map_ptr: u32 = + u32::try_from(map_ptr.as_canonical_u64()).expect("map_ptr must be a valid u32"); Self { map_ptr, mem } } @@ -41,32 +42,32 @@ impl<'process> LinkMap<'process> { /// Handles a `LINK_MAP_SET_EVENT` emitted from a VM. 
/// /// Expected operand stack state before: [map_ptr, KEY, NEW_VALUE] - /// Advice stack state after: [set_operation, entry_ptr] - pub fn handle_set_event(process: &ProcessState<'_>) -> Vec { + /// Advice stack state after: [entry_ptr, set_operation] + pub fn handle_set_event(process: &ProcessorState<'_>) -> Vec { let map_ptr = process.get_stack_item(1); - let map_key = process.get_stack_word_be(2); + let map_key = process.get_stack_word(2); let mem_viewer = MemoryViewer::ProcessState(process); let link_map = LinkMap::new(map_ptr, &mem_viewer); let (set_op, entry_ptr) = link_map.compute_set_operation(LexicographicWord::from(map_key)); - vec![AdviceMutation::extend_stack([Felt::from(set_op as u8), Felt::from(entry_ptr)])] + vec![AdviceMutation::extend_stack([Felt::from(entry_ptr), Felt::from(set_op as u8)])] } /// Handles a `LINK_MAP_GET_EVENT` emitted from a VM. /// /// Expected operand stack state before: [map_ptr, KEY] - /// Advice stack state after: [get_operation, entry_ptr] - pub fn handle_get_event(process: &ProcessState<'_>) -> Vec { + /// Advice stack state after: [entry_ptr, get_operation] + pub fn handle_get_event(process: &ProcessorState<'_>) -> Vec { let map_ptr = process.get_stack_item(1); - let map_key = process.get_stack_word_be(2); + let map_key = process.get_stack_word(2); let mem_viewer = MemoryViewer::ProcessState(process); let link_map = LinkMap::new(map_ptr, &mem_viewer); let (get_op, entry_ptr) = link_map.compute_get_operation(LexicographicWord::from(map_key)); - vec![AdviceMutation::extend_stack([Felt::from(get_op as u8), Felt::from(entry_ptr)])] + vec![AdviceMutation::extend_stack([Felt::from(entry_ptr), Felt::from(get_op as u8)])] } /// Returns `true` if the map is empty, `false` otherwise. 
@@ -94,7 +95,10 @@ impl<'process> LinkMap<'process> { if head_ptr == ZERO { None } else { - Some(u32::try_from(head_ptr).expect("head ptr should be a valid ptr")) + Some( + u32::try_from(head_ptr.as_canonical_u64()) + .expect("head ptr should be a valid ptr"), + ) } }) } @@ -142,16 +146,15 @@ impl<'process> LinkMap<'process> { self.mem.get_kernel_mem_word(entry_ptr).expect("entry pointer should be valid"); let map_ptr = entry_metadata[0]; - let map_ptr = map_ptr.try_into().expect("entry_ptr should point to a u32 map_ptr"); + let map_ptr = u32::try_from(map_ptr.as_canonical_u64()) + .expect("entry_ptr should point to a u32 map_ptr"); let prev_entry_ptr = entry_metadata[1]; - let prev_entry_ptr = prev_entry_ptr - .try_into() + let prev_entry_ptr = u32::try_from(prev_entry_ptr.as_canonical_u64()) .expect("entry_ptr should point to a u32 prev_entry_ptr"); let next_entry_ptr = entry_metadata[2]; - let next_entry_ptr = next_entry_ptr - .try_into() + let next_entry_ptr = u32::try_from(next_entry_ptr.as_canonical_u64()) .expect("entry_ptr should point to a u32 next_entry_ptr"); EntryMetadata { map_ptr, prev_entry_ptr, next_entry_ptr } @@ -293,14 +296,14 @@ enum SetOperation { /// A abstraction over ways to view a process' memory. /// -/// More specifically, it allows using a [`LinkMap`] both with a [`ProcessState`], i.e. a process +/// More specifically, it allows using a [`LinkMap`] both with a [`ProcessorState`], i.e. a process /// that is actively executing and also an [`ExecutionOutput`], i.e. a process that has finished /// execution. /// /// This should all go away again once we change a LinkMap's implementation to be based on an actual /// map type instead of viewing a process' memory directly. 
pub enum MemoryViewer<'mem> { - ProcessState(&'mem ProcessState<'mem>), + ProcessState(&'mem ProcessorState<'mem>), ExecutionOutputs(&'mem ExecutionOutput), } @@ -333,14 +336,13 @@ impl<'mem> MemoryViewer<'mem> { MemoryViewer::ExecutionOutputs(execution_output) => { let tx_kernel_context = ContextId::root(); let clk = 0u32; - let err_ctx = (); // Note that this never returns None even if the location is uninitialized, but the // link map does not rely on this. Some( execution_output .memory - .read_word(tx_kernel_context, Felt::from(addr), clk.into(), &err_ctx) + .read_word(tx_kernel_context, Felt::from(addr), clk.into()) .expect("expected address to be word-aligned"), ) }, diff --git a/crates/miden-tx/src/host/mod.rs b/crates/miden-tx/src/host/mod.rs index 019a3a0291..b0f40dac3b 100644 --- a/crates/miden-tx/src/host/mod.rs +++ b/crates/miden-tx/src/host/mod.rs @@ -11,7 +11,8 @@ pub use account_procedures::AccountProcedureIndexMap; pub(crate) mod note_builder; use miden_protocol::CoreLibrary; -use miden_protocol::vm::EventId; +use miden_protocol::transaction::TransactionEventId; +use miden_protocol::vm::{EventId, EventName}; use note_builder::OutputNoteBuilder; mod kernel_process; @@ -28,15 +29,11 @@ use alloc::collections::BTreeMap; use alloc::sync::Arc; use alloc::vec::Vec; -use miden_processor::{ - AdviceMutation, - EventError, - EventHandlerRegistry, - Felt, - MastForest, - MastForestStore, - ProcessState, -}; +use miden_processor::advice::AdviceMutation; +use miden_processor::event::{EventError, EventHandlerRegistry}; +use miden_processor::mast::MastForest; +use miden_processor::trace::RowIndex; +use miden_processor::{Felt, MastForestStore, ProcessorState}; use miden_protocol::Word; use miden_protocol::account::{ AccountCode, @@ -45,6 +42,7 @@ use miden_protocol::account::{ AccountId, AccountStorageHeader, PartialAccount, + StorageMapKey, StorageSlotHeader, StorageSlotId, StorageSlotName, @@ -54,12 +52,11 @@ use miden_protocol::note::{NoteAttachment, NoteId, 
NoteMetadata, NoteRecipient}; use miden_protocol::transaction::{ InputNote, InputNotes, - OutputNote, - OutputNotes, + RawOutputNote, + RawOutputNotes, TransactionMeasurements, TransactionSummary, }; -use miden_protocol::vm::RowIndex; pub(crate) use tx_event::{RecipientData, TransactionEvent, TransactionProgressEvent}; pub use tx_progress::TransactionProgress; @@ -192,12 +189,12 @@ impl<'store, STORE> TransactionBaseHost<'store, STORE> { /// Clones the inner [`OutputNoteBuilder`]s and returns the vector of created output notes that /// are tracked by this host. - pub fn build_output_notes(&self) -> Vec { + pub fn build_output_notes(&self) -> Vec { self.output_notes.values().cloned().map(|builder| builder.build()).collect() } /// Consumes `self` and returns the account delta, input and output notes. - pub fn into_parts(self) -> (AccountDelta, InputNotes, Vec) { + pub fn into_parts(self) -> (AccountDelta, InputNotes, Vec) { let output_notes = self.output_notes.into_values().map(|builder| builder.build()).collect(); (self.account_delta.into_delta(), self.input_notes, output_notes) @@ -269,7 +266,7 @@ impl<'store, STORE> TransactionBaseHost<'store, STORE> { /// Returns `Some` if the event was handled, `None` otherwise. pub fn handle_core_lib_events( &self, - process: &ProcessState, + process: &ProcessorState, ) -> Result>, EventError> { let event_id = EventId::from_felt(process.get_stack_item(0)); if let Some(mutations) = self.core_lib_handlers.handle_event(event_id, process)? { @@ -279,6 +276,20 @@ impl<'store, STORE> TransactionBaseHost<'store, STORE> { } } + /// Resolves an [`EventId`] to its corresponding [`EventName`], if known. + /// + /// First checks if the event is a core library event, then checks if it is a transaction + /// kernel event. 
+ pub fn resolve_event(&self, event_id: EventId) -> Option<&EventName> { + if let Some(name) = self.core_lib_handlers.resolve_event(event_id) { + return Some(name); + } + + TransactionEventId::try_from(event_id) + .ok() + .map(|event_id| event_id.event_name()) + } + /// Converts the provided signature into an advice mutation that pushes it onto the advice stack /// as a response to an `AuthRequest` event. pub fn on_auth_requested(&self, signature: Vec) -> Vec { @@ -358,7 +369,7 @@ impl<'store, STORE> TransactionBaseHost<'store, STORE> { pub fn on_account_storage_after_set_map_item( &mut self, slot_name: StorageSlotName, - key: Word, + key: StorageMapKey, old_map_value: Word, new_map_value: Word, ) -> Result, TransactionKernelError> { @@ -405,15 +416,15 @@ impl<'store, STORE> TransactionBaseHost<'store, STORE> { /// provided commitments. pub(crate) fn build_tx_summary( &self, - salt: Word, - output_notes_commitment: Word, - input_notes_commitment: Word, account_delta_commitment: Word, + input_notes_commitment: Word, + output_notes_commitment: Word, + salt: Word, ) -> Result { let account_delta = self.build_account_delta(); let input_notes = self.input_notes(); let output_notes_vec = self.build_output_notes(); - let output_notes = OutputNotes::new(output_notes_vec).map_err(|err| { + let output_notes = RawOutputNotes::new(output_notes_vec).map_err(|err| { TransactionKernelError::TransactionSummaryConstructionFailed(Box::new(err)) })?; diff --git a/crates/miden-tx/src/host/note_builder.rs b/crates/miden-tx/src/host/note_builder.rs index eac4f8a006..d392c16b51 100644 --- a/crates/miden-tx/src/host/note_builder.rs +++ b/crates/miden-tx/src/host/note_builder.rs @@ -1,4 +1,7 @@ +use alloc::vec::Vec; + use miden_protocol::asset::Asset; +use miden_protocol::errors::NoteError; use miden_protocol::note::{ Note, NoteAssets, @@ -8,17 +11,21 @@ use miden_protocol::note::{ PartialNote, }; -use super::{OutputNote, Word}; +use super::{RawOutputNote, Word}; use 
crate::errors::TransactionKernelError; // OUTPUT NOTE BUILDER // ================================================================================================ /// Builder of an output note, provided primarily to enable adding assets to a note incrementally. +/// +/// Assets are accumulated in a `Vec` and the final `NoteAssets` is only constructed when +/// [`build`](Self::build) is called. This avoids recomputing the commitment hash on every asset +/// addition. #[derive(Debug, Clone)] pub struct OutputNoteBuilder { metadata: NoteMetadata, - assets: NoteAssets, + assets: Vec, recipient_digest: Word, recipient: Option, } @@ -50,7 +57,7 @@ impl OutputNoteBuilder { metadata, recipient_digest, recipient: None, - assets: NoteAssets::default(), + assets: Vec::new(), }) } @@ -60,7 +67,7 @@ impl OutputNoteBuilder { metadata, recipient_digest: recipient.digest(), recipient: Some(recipient), - assets: NoteAssets::default(), + assets: Vec::new(), } } @@ -78,9 +85,34 @@ impl OutputNoteBuilder { /// - Adding the asset to the note will push the list beyond the [NoteAssets::MAX_NUM_ASSETS] /// limit. pub fn add_asset(&mut self, asset: Asset) -> Result<(), TransactionKernelError> { - self.assets - .add_asset(asset) - .map_err(TransactionKernelError::FailedToAddAssetToNote)?; + // Check if an asset issued by the same faucet already exists in the list of assets. + if let Some(own_asset) = self.assets.iter_mut().find(|a| a.is_same(&asset)) { + match own_asset { + Asset::Fungible(f_own_asset) => { + // If a fungible asset issued by the same faucet is found, try to add the + // provided asset to it. 
+ let new_asset = f_own_asset + .add(asset.unwrap_fungible()) + .map_err(NoteError::AddFungibleAssetBalanceError) + .map_err(TransactionKernelError::FailedToAddAssetToNote)?; + *own_asset = Asset::Fungible(new_asset); + }, + Asset::NonFungible(nf_asset) => { + return Err(TransactionKernelError::FailedToAddAssetToNote( + NoteError::DuplicateNonFungibleAsset(*nf_asset), + )); + }, + } + } else { + // If the asset is not in the list, add it to the list. + self.assets.push(asset); + if self.assets.len() > NoteAssets::MAX_NUM_ASSETS { + return Err(TransactionKernelError::FailedToAddAssetToNote( + NoteError::TooManyAssets(self.assets.len()), + )); + } + } + Ok(()) } @@ -91,17 +123,20 @@ impl OutputNoteBuilder { /// Converts this builder to an [OutputNote]. /// - /// Depending on the available information, this may result in [OutputNote::Full] or - /// [OutputNote::Partial] notes. - pub fn build(self) -> OutputNote { + /// Depending on the available information, this may result in [`OutputNote::Full`] or + /// [`OutputNote::Partial`] notes. 
+ pub fn build(self) -> RawOutputNote { + let assets = NoteAssets::new(self.assets) + .expect("assets should be valid since add_asset validates them"); + match self.recipient { Some(recipient) => { - let note = Note::new(self.assets, self.metadata, recipient); - OutputNote::Full(note) + let note = Note::new(assets, self.metadata, recipient); + RawOutputNote::Full(note) }, None => { - let note = PartialNote::new(self.metadata, self.recipient_digest, self.assets); - OutputNote::Partial(note) + let note = PartialNote::new(self.metadata, self.recipient_digest, assets); + RawOutputNote::Partial(note) }, } } diff --git a/crates/miden-tx/src/host/storage_delta_tracker.rs b/crates/miden-tx/src/host/storage_delta_tracker.rs index 6270612130..86400615c7 100644 --- a/crates/miden-tx/src/host/storage_delta_tracker.rs +++ b/crates/miden-tx/src/host/storage_delta_tracker.rs @@ -6,6 +6,7 @@ use miden_protocol::account::{ AccountStorageDelta, AccountStorageHeader, PartialAccount, + StorageMapKey, StorageSlotDelta, StorageSlotHeader, StorageSlotName, @@ -32,7 +33,7 @@ pub struct StorageDeltaTracker { storage_header: AccountStorageHeader, /// A map from slot name to a map of key-value pairs where the key is a storage map key and /// the value represents the value of that key at the beginning of transaction execution. - init_maps: BTreeMap>, + init_maps: BTreeMap>, /// The account storage delta. delta: AccountStorageDelta, } @@ -111,7 +112,7 @@ impl StorageDeltaTracker { pub fn set_map_item( &mut self, slot_name: StorageSlotName, - key: Word, + key: StorageMapKey, prev_value: Word, new_value: Word, ) { @@ -134,7 +135,12 @@ impl StorageDeltaTracker { /// Sets the initial value of the given key in the given slot to the given value, if no value is /// already tracked for that key. 
- fn set_init_map_item(&mut self, slot_name: StorageSlotName, key: Word, prev_value: Word) { + fn set_init_map_item( + &mut self, + slot_name: StorageSlotName, + key: StorageMapKey, + prev_value: Word, + ) { let slot_map = self.init_maps.entry(slot_name).or_default(); slot_map.entry(key).or_insert(prev_value); } diff --git a/crates/miden-tx/src/host/tx_event.rs b/crates/miden-tx/src/host/tx_event.rs index 1272248ba1..93aab405c2 100644 --- a/crates/miden-tx/src/host/tx_event.rs +++ b/crates/miden-tx/src/host/tx_event.rs @@ -1,7 +1,15 @@ use alloc::vec::Vec; -use miden_processor::{AdviceMutation, AdviceProvider, ProcessState, RowIndex}; -use miden_protocol::account::{AccountId, StorageMap, StorageSlotName, StorageSlotType}; +use miden_processor::ProcessorState; +use miden_processor::advice::{AdviceMutation, AdviceProvider}; +use miden_processor::trace::RowIndex; +use miden_protocol::account::{ + AccountId, + StorageMap, + StorageMapKey, + StorageSlotName, + StorageSlotType, +}; use miden_protocol::asset::{Asset, AssetVault, AssetVaultKey, FungibleAsset}; use miden_protocol::note::{ NoteAttachment, @@ -77,7 +85,7 @@ pub(crate) enum TransactionEvent { AccountStorageAfterSetMapItem { slot_name: StorageSlotName, - key: Word, + key: StorageMapKey, old_value: Word, new_value: Word, }, @@ -89,7 +97,7 @@ pub(crate) enum TransactionEvent { /// The root of the storage map for which a witness is requested. map_root: Word, /// The raw map key for which a witness is requested. - map_key: Word, + map_key: StorageMapKey, }, /// The data necessary to request an asset witness from the data store. @@ -167,7 +175,7 @@ impl TransactionEvent { /// handled, `None` otherwise. 
pub fn extract<'store, STORE>( base_host: &TransactionBaseHost<'store, STORE>, - process: &ProcessState, + process: &ProcessorState, ) -> Result, TransactionKernelError> { let event_id = EventId::from_felt(process.get_stack_item(0)); let tx_event_id = TransactionEventId::try_from(event_id).map_err(|err| { @@ -179,9 +187,10 @@ impl TransactionEvent { let tx_event = match tx_event_id { TransactionEventId::AccountBeforeForeignLoad => { - // Expected stack state: [event, account_id_prefix, account_id_suffix] - let account_id_word = process.get_stack_word_be(1); - let account_id = AccountId::try_from([account_id_word[3], account_id_word[2]]) + // Expected stack state: [event, account_id_suffix, account_id_prefix] + let account_id_suffix = process.get_stack_item(1); + let account_id_prefix = process.get_stack_item(2); + let account_id = AccountId::try_from_elements(account_id_suffix, account_id_prefix) .map_err(|err| { TransactionKernelError::other_with_source( "failed to convert account ID word into account ID", @@ -193,55 +202,68 @@ impl TransactionEvent { }, TransactionEventId::AccountVaultBeforeAddAsset | TransactionEventId::AccountVaultBeforeRemoveAsset => { - // Expected stack state: [event, ASSET, account_vault_root_ptr] - let asset_word = process.get_stack_word_be(1); - let asset = Asset::try_from(asset_word).map_err(|source| { - TransactionKernelError::MalformedAssetInEventHandler { - handler: "on_account_vault_before_add_or_remove_asset", - source, - } - })?; - - let vault_root_ptr = process.get_stack_item(5); + // Expected stack state: [event, ASSET_KEY, ASSET_VALUE, account_vault_root_ptr] + let asset_vault_key = process.get_stack_word(1); + let vault_root_ptr = process.get_stack_item(9); + + let asset_vault_key = + AssetVaultKey::try_from(asset_vault_key).map_err(|source| { + TransactionKernelError::MalformedAssetInEventHandler { + handler: "AccountVaultBefore{Add,Remove}Asset", + source, + } + })?; let current_vault_root = 
process.get_vault_root(vault_root_ptr)?; on_account_vault_asset_accessed( base_host, process, - asset.vault_key(), + asset_vault_key, current_vault_root, )? }, TransactionEventId::AccountVaultAfterRemoveAsset => { - // Expected stack state: [event, ASSET] - let asset: Asset = process.get_stack_word_be(1).try_into().map_err(|source| { - TransactionKernelError::MalformedAssetInEventHandler { - handler: "on_account_vault_after_remove_asset", - source, - } - })?; + // Expected stack state: [event, ASSET_KEY, ASSET_VALUE] + let asset_key = process.get_stack_word(1); + let asset_value = process.get_stack_word(5); + + let asset = + Asset::from_key_value_words(asset_key, asset_value).map_err(|source| { + TransactionKernelError::MalformedAssetInEventHandler { + handler: "AccountVaultAfterRemoveAsset", + source, + } + })?; Some(TransactionEvent::AccountVaultAfterRemoveAsset { asset }) }, TransactionEventId::AccountVaultAfterAddAsset => { - // Expected stack state: [event, ASSET] - let asset: Asset = process.get_stack_word_be(1).try_into().map_err(|source| { - TransactionKernelError::MalformedAssetInEventHandler { - handler: "on_account_vault_after_add_asset", - source, - } - })?; + // Expected stack state: [event, ASSET_KEY, ASSET_VALUE] + let asset_key = process.get_stack_word(1); + let asset_value = process.get_stack_word(5); + + let asset = + Asset::from_key_value_words(asset_key, asset_value).map_err(|source| { + TransactionKernelError::MalformedAssetInEventHandler { + handler: "AccountVaultAfterAddAsset", + source, + } + })?; Some(TransactionEvent::AccountVaultAfterAddAsset { asset }) }, TransactionEventId::AccountVaultBeforeGetAsset => { // Expected stack state: // [event, ASSET_KEY, vault_root_ptr] - let asset_key = process.get_stack_word_be(1); + let asset_key = process.get_stack_word(1); let vault_root_ptr = process.get_stack_item(5); - // TODO(expand_assets): Consider whether validation is necessary. 
- let asset_key = AssetVaultKey::new_unchecked(asset_key); + let asset_key = AssetVaultKey::try_from(asset_key).map_err(|source| { + TransactionKernelError::MalformedAssetInEventHandler { + handler: "AccountVaultBeforeGetAsset", + source, + } + })?; let vault_root = process.get_vault_root(vault_root_ptr)?; on_account_vault_asset_accessed(base_host, process, asset_key, vault_root)? @@ -252,7 +274,7 @@ impl TransactionEvent { TransactionEventId::AccountStorageAfterSetItem => { // Expected stack state: [event, slot_ptr, VALUE] let slot_ptr = process.get_stack_item(1); - let new_value = process.get_stack_word_be(2); + let new_value = process.get_stack_word(2); let (slot_id, slot_type, _old_value) = process.get_storage_slot(slot_ptr)?; @@ -271,7 +293,8 @@ impl TransactionEvent { TransactionEventId::AccountStorageBeforeGetMapItem => { // Expected stack state: [event, slot_ptr, KEY] let slot_ptr = process.get_stack_item(1); - let map_key = process.get_stack_word_be(2); + let map_key = process.get_stack_word(2); + let map_key = StorageMapKey::from_raw(map_key); on_account_storage_map_item_accessed(base_host, process, slot_ptr, map_key)? }, @@ -279,7 +302,8 @@ impl TransactionEvent { TransactionEventId::AccountStorageBeforeSetMapItem => { // Expected stack state: [event, slot_ptr, KEY] let slot_ptr = process.get_stack_item(1); - let map_key = process.get_stack_word_be(2); + let map_key = process.get_stack_word(2); + let map_key = StorageMapKey::from_raw(map_key); on_account_storage_map_item_accessed(base_host, process, slot_ptr, map_key)? 
}, @@ -287,10 +311,11 @@ impl TransactionEvent { TransactionEventId::AccountStorageAfterSetMapItem => { // Expected stack state: [event, slot_ptr, KEY, OLD_VALUE, NEW_VALUE] let slot_ptr = process.get_stack_item(1); - let key = process.get_stack_word_be(2); - let old_value = process.get_stack_word_be(6); - let new_value = process.get_stack_word_be(10); + let key = process.get_stack_word(2); + let old_value = process.get_stack_word(6); + let new_value = process.get_stack_word(10); + let key = StorageMapKey::from_raw(key); // Resolve slot ID to slot name. let (slot_id, ..) = process.get_storage_slot(slot_ptr)?; let slot_header = base_host.initial_account_storage_slot(slot_id)?; @@ -312,7 +337,7 @@ impl TransactionEvent { TransactionEventId::AccountPushProcedureIndex => { // Expected stack state: [event, PROC_ROOT] - let procedure_root = process.get_stack_word_be(1); + let procedure_root = process.get_stack_word(1); let code_commitment = process.get_active_account_code_commitment()?; Some(TransactionEvent::AccountPushProcedureIndex { @@ -325,7 +350,7 @@ impl TransactionEvent { // Expected stack state: [event, tag, note_type, RECIPIENT] let tag = process.get_stack_item(1); let note_type = process.get_stack_item(2); - let recipient_digest = process.get_stack_word_be(3); + let recipient_digest = process.get_stack_word(3); let sender = base_host.native_account_id(); let metadata = build_note_metadata(sender, note_type, tag)?; @@ -381,16 +406,19 @@ impl TransactionEvent { TransactionEventId::NoteAfterCreated => None, TransactionEventId::NoteBeforeAddAsset => { - // Expected stack state: [event, ASSET, note_ptr, num_of_assets, note_idx] - let note_idx = process.get_stack_item(7).as_int() as usize; - - let asset_word = process.get_stack_word_be(1); - let asset = Asset::try_from(asset_word).map_err(|source| { - TransactionKernelError::MalformedAssetInEventHandler { - handler: "on_note_before_add_asset", - source, - } - })?; + // Expected stack state: [event, ASSET_KEY, 
ASSET_VALUE, note_idx] + let asset_key = process.get_stack_word(1); + let asset_value = process.get_stack_word(5); + let note_idx = process.get_stack_item(9); + + let asset = + Asset::from_key_value_words(asset_key, asset_value).map_err(|source| { + TransactionKernelError::MalformedAssetInEventHandler { + handler: "NoteBeforeAddAsset", + source, + } + })?; + let note_idx = note_idx.as_canonical_u64() as usize; Some(TransactionEvent::NoteBeforeAddAsset { note_idx, asset }) }, @@ -406,7 +434,7 @@ impl TransactionEvent { let attachment_scheme = process.get_stack_item(1); let attachment_kind = process.get_stack_item(2); let note_ptr = process.get_stack_item(3); - let attachment = process.get_stack_word_be(5); + let attachment = process.get_stack_word(5); let (note_idx, attachment) = extract_note_attachment( attachment_scheme, @@ -421,8 +449,8 @@ impl TransactionEvent { TransactionEventId::AuthRequest => { // Expected stack state: [event, MESSAGE, PUB_KEY] - let message = process.get_stack_word_be(1); - let pub_key_hash = process.get_stack_word_be(5); + let message = process.get_stack_word(1); + let pub_key_hash = process.get_stack_word(5); let signature_key = Hasher::merge(&[pub_key_hash, message]); let signature = process @@ -437,16 +465,18 @@ impl TransactionEvent { TransactionEventId::Unauthorized => { // Expected stack state: [event, MESSAGE] - let message = process.get_stack_word_be(1); + let message = process.get_stack_word(1); let tx_summary = extract_tx_summary(base_host, process, message)?; Some(TransactionEvent::Unauthorized { tx_summary }) }, TransactionEventId::EpilogueBeforeTxFeeRemovedFromAccount => { - // Expected stack state: [event, FEE_ASSET] - let fee_asset = process.get_stack_word_be(1); - let fee_asset = FungibleAsset::try_from(fee_asset) + // Expected stack state: [event, FEE_ASSET_KEY, FEE_ASSET_VALUE] + let fee_asset_key = process.get_stack_word(1); + let fee_asset_value = process.get_stack_word(5); + + let fee_asset = 
FungibleAsset::from_key_value_words(fee_asset_key, fee_asset_value) .map_err(TransactionKernelError::FailedToConvertFeeAsset)?; Some(TransactionEvent::EpilogueBeforeTxFeeRemovedFromAccount { fee_asset }) @@ -460,17 +490,17 @@ impl TransactionEvent { }), TransactionEventId::PrologueStart => Some(TransactionEvent::Progress( - TransactionProgressEvent::PrologueStart(process.clk()), + TransactionProgressEvent::PrologueStart(process.clock()), )), TransactionEventId::PrologueEnd => Some(TransactionEvent::Progress( - TransactionProgressEvent::PrologueEnd(process.clk()), + TransactionProgressEvent::PrologueEnd(process.clock()), )), TransactionEventId::NotesProcessingStart => Some(TransactionEvent::Progress( - TransactionProgressEvent::NotesProcessingStart(process.clk()), + TransactionProgressEvent::NotesProcessingStart(process.clock()), )), TransactionEventId::NotesProcessingEnd => Some(TransactionEvent::Progress( - TransactionProgressEvent::NotesProcessingEnd(process.clk()), + TransactionProgressEvent::NotesProcessingEnd(process.clock()), )), TransactionEventId::NoteExecutionStart => { @@ -480,36 +510,36 @@ impl TransactionEvent { Some(TransactionEvent::Progress(TransactionProgressEvent::NoteExecutionStart { note_id, - clk: process.clk(), + clk: process.clock(), })) }, TransactionEventId::NoteExecutionEnd => Some(TransactionEvent::Progress( - TransactionProgressEvent::NoteExecutionEnd(process.clk()), + TransactionProgressEvent::NoteExecutionEnd(process.clock()), )), TransactionEventId::TxScriptProcessingStart => Some(TransactionEvent::Progress( - TransactionProgressEvent::TxScriptProcessingStart(process.clk()), + TransactionProgressEvent::TxScriptProcessingStart(process.clock()), )), TransactionEventId::TxScriptProcessingEnd => Some(TransactionEvent::Progress( - TransactionProgressEvent::TxScriptProcessingEnd(process.clk()), + TransactionProgressEvent::TxScriptProcessingEnd(process.clock()), )), TransactionEventId::EpilogueStart => Some(TransactionEvent::Progress( - 
TransactionProgressEvent::EpilogueStart(process.clk()), + TransactionProgressEvent::EpilogueStart(process.clock()), )), TransactionEventId::EpilogueEnd => Some(TransactionEvent::Progress( - TransactionProgressEvent::EpilogueEnd(process.clk()), + TransactionProgressEvent::EpilogueEnd(process.clock()), )), TransactionEventId::EpilogueAuthProcStart => Some(TransactionEvent::Progress( - TransactionProgressEvent::EpilogueAuthProcStart(process.clk()), + TransactionProgressEvent::EpilogueAuthProcStart(process.clock()), )), TransactionEventId::EpilogueAuthProcEnd => Some(TransactionEvent::Progress( - TransactionProgressEvent::EpilogueAuthProcEnd(process.clk()), + TransactionProgressEvent::EpilogueAuthProcEnd(process.clock()), )), TransactionEventId::EpilogueAfterTxCyclesObtained => Some(TransactionEvent::Progress( - TransactionProgressEvent::EpilogueAfterTxCyclesObtained(process.clk()), + TransactionProgressEvent::EpilogueAfterTxCyclesObtained(process.clock()), )), }; @@ -542,11 +572,12 @@ pub(crate) enum RecipientData { /// - If not, returns `Some` with all necessary data for requesting it. fn on_account_vault_asset_accessed<'store, STORE>( base_host: &TransactionBaseHost<'store, STORE>, - process: &ProcessState, + process: &ProcessorState, vault_key: AssetVaultKey, vault_root: Word, ) -> Result, TransactionKernelError> { - let leaf_index = Felt::new(vault_key.to_leaf_index().value()); + let leaf_index = Felt::try_from(vault_key.to_leaf_index().position()) + .expect("expected key index to be a felt"); let active_account_id = process.get_active_account_id()?; // For the native account we need to explicitly request the initial vault root, while for @@ -579,9 +610,9 @@ fn on_account_vault_asset_accessed<'store, STORE>( /// - If not, returns `Some` with all necessary data for requesting it. 
fn on_account_storage_map_item_accessed<'store, STORE>( base_host: &TransactionBaseHost<'store, STORE>, - process: &ProcessState, + process: &ProcessorState, slot_ptr: Felt, - map_key: Word, + map_key: StorageMapKey, ) -> Result, TransactionKernelError> { let (slot_id, slot_type, current_map_root) = process.get_storage_slot(slot_ptr)?; @@ -592,8 +623,10 @@ fn on_account_storage_map_item_accessed<'store, STORE>( } let active_account_id = process.get_active_account_id()?; - let leaf_index: Felt = StorageMap::map_key_to_leaf_index(map_key) - .value() + let leaf_index: Felt = map_key + .hash() + .to_leaf_index() + .position() .try_into() .expect("expected key index to be a felt"); @@ -639,7 +672,7 @@ fn on_account_storage_map_item_accessed<'store, STORE>( /// ``` fn extract_tx_summary<'store, STORE>( base_host: &TransactionBaseHost<'store, STORE>, - process: &ProcessState, + process: &ProcessorState, message: Word, ) -> Result { let Some(commitments) = process.advice_provider().get_mapped_values(&message) else { @@ -654,16 +687,16 @@ fn extract_tx_summary<'store, STORE>( )); } - let salt = extract_word(commitments, 0); - let output_notes_commitment = extract_word(commitments, 4); - let input_notes_commitment = extract_word(commitments, 8); - let account_delta_commitment = extract_word(commitments, 12); + let account_delta_commitment = extract_word(commitments, 0); + let input_notes_commitment = extract_word(commitments, 4); + let output_notes_commitment = extract_word(commitments, 8); + let salt = extract_word(commitments, 12); let tx_summary = base_host.build_tx_summary( - salt, - output_notes_commitment, - input_notes_commitment, account_delta_commitment, + input_notes_commitment, + output_notes_commitment, + salt, )?; if tx_summary.to_commitment() != message { @@ -684,7 +717,7 @@ fn build_note_metadata( note_type: Felt, tag: Felt, ) -> Result { - let note_type = u8::try_from(note_type) + let note_type = u8::try_from(note_type.as_canonical_u64()) .map_err(|_| 
TransactionKernelError::other("failed to decode note_type into u8")) .and_then(|note_type_byte| { NoteType::try_from(note_type_byte).map_err(|source| { @@ -695,7 +728,7 @@ fn build_note_metadata( }) })?; - let tag = u32::try_from(tag) + let tag = u32::try_from(tag.as_canonical_u64()) .map_err(|_| TransactionKernelError::other("failed to decode note tag into u32")) .map(NoteTag::new)?; @@ -711,7 +744,7 @@ fn extract_note_attachment( ) -> Result<(usize, NoteAttachment), TransactionKernelError> { let note_idx = note_ptr_to_idx(note_ptr)?; - let attachment_kind = u8::try_from(attachment_kind) + let attachment_kind = u8::try_from(attachment_kind.as_canonical_u64()) .map_err(|_| TransactionKernelError::other("failed to convert attachment kind to u8")) .and_then(|attachment_kind| { NoteAttachmentKind::try_from(attachment_kind).map_err(|source| { @@ -722,7 +755,7 @@ fn extract_note_attachment( }) })?; - let attachment_scheme = u32::try_from(attachment_scheme) + let attachment_scheme = u32::try_from(attachment_scheme.as_canonical_u64()) .map_err(|_| TransactionKernelError::other("failed to convert attachment scheme to u32")) .map(NoteAttachmentScheme::new)?; @@ -781,7 +814,7 @@ fn extract_word(commitments: &[Felt], start: usize) -> Word { /// Converts the provided note ptr into the corresponding note index. 
fn note_ptr_to_idx(note_ptr: Felt) -> Result { - u32::try_from(note_ptr) + u32::try_from(note_ptr.as_canonical_u64()) .map_err(|_| TransactionKernelError::other("failed to convert note_ptr to u32")) .and_then(|note_ptr| { note_ptr diff --git a/crates/miden-tx/src/lib.rs b/crates/miden-tx/src/lib.rs index a756df72b7..3dd06bdcc5 100644 --- a/crates/miden-tx/src/lib.rs +++ b/crates/miden-tx/src/lib.rs @@ -15,6 +15,7 @@ pub use executor::{ MastForestStore, NoteConsumptionChecker, NoteConsumptionInfo, + ProgramExecutor, TransactionExecutor, TransactionExecutorHost, }; diff --git a/crates/miden-tx/src/prover/mod.rs b/crates/miden-tx/src/prover/mod.rs index f995a7db90..ec2b54ec50 100644 --- a/crates/miden-tx/src/prover/mod.rs +++ b/crates/miden-tx/src/prover/mod.rs @@ -8,12 +8,11 @@ use miden_protocol::block::BlockNumber; use miden_protocol::transaction::{ InputNote, InputNotes, - OutputNote, ProvenTransaction, - ProvenTransactionBuilder, TransactionInputs, TransactionKernel, TransactionOutputs, + TxAccountUpdate, }; pub use miden_prover::ProvingOptions; use miden_prover::{ExecutionProof, Word, prove}; @@ -56,45 +55,53 @@ impl LocalTransactionProver { proof: ExecutionProof, ) -> Result { // erase private note information (convert private full notes to just headers) - let output_notes: Vec<_> = tx_outputs.output_notes.iter().map(OutputNote::shrink).collect(); + let output_notes: Vec<_> = tx_outputs + .output_notes + .iter() + .map(|note| note.to_output_note()) + .collect::, _>>() + .map_err(TransactionProverError::OutputNoteShrinkFailed)?; // Compute the commitment of the pre-fee delta, which goes into the proven transaction, // since it is the output of the transaction and so is needed for proof verification. let pre_fee_delta_commitment: Word = pre_fee_account_delta.to_commitment(); - let builder = ProvenTransactionBuilder::new( + // The full transaction delta is the pre fee delta with the fee asset removed. 
+ let mut post_fee_account_delta = pre_fee_account_delta; + post_fee_account_delta + .vault_mut() + .remove_asset(Asset::from(tx_outputs.fee)) + .map_err(TransactionProverError::RemoveFeeAssetFromDelta)?; + + let account_update_details = if account.has_public_state() { + AccountUpdateDetails::Delta(post_fee_account_delta) + } else { + AccountUpdateDetails::Private + }; + + let account_update = TxAccountUpdate::new( account.id(), account.initial_commitment(), tx_outputs.account.to_commitment(), pre_fee_delta_commitment, + account_update_details, + ) + .map_err(TransactionProverError::ProvenTransactionBuildFailed)?; + + ProvenTransaction::new( + account_update, + input_notes.iter(), + output_notes, ref_block_num, ref_block_commitment, tx_outputs.fee, tx_outputs.expiration_block_num, proof, ) - .add_input_notes(input_notes) - .add_output_notes(output_notes); - - // The full transaction delta is the pre fee delta with the fee asset removed. - let mut post_fee_account_delta = pre_fee_account_delta; - post_fee_account_delta - .vault_mut() - .remove_asset(Asset::from(tx_outputs.fee)) - .map_err(TransactionProverError::RemoveFeeAssetFromDelta)?; - - let builder = match account.has_public_state() { - true => { - let account_update_details = AccountUpdateDetails::Delta(post_fee_account_delta); - builder.account_update_details(account_update_details) - }, - false => builder, - }; - - builder.build().map_err(TransactionProverError::ProvenTransactionBuildFailed) + .map_err(TransactionProverError::ProvenTransactionBuildFailed) } - pub fn prove( + pub async fn prove( &self, tx_inputs: impl Into, ) -> Result { @@ -133,6 +140,7 @@ impl LocalTransactionProver { &mut host, self.proof_options.clone(), ) + .await .map_err(TransactionProverError::TransactionProgramExecutionFailed)?; // Extract transaction outputs and process transaction data. 
diff --git a/crates/miden-tx/src/prover/prover_host.rs b/crates/miden-tx/src/prover/prover_host.rs index db00cdf2d0..b6b4156678 100644 --- a/crates/miden-tx/src/prover/prover_host.rs +++ b/crates/miden-tx/src/prover/prover_host.rs @@ -1,25 +1,21 @@ use alloc::sync::Arc; use alloc::vec::Vec; -use miden_processor::{ - AdviceMutation, - BaseHost, - EventError, - MastForest, - MastForestStore, - ProcessState, - SyncHost, -}; +use miden_processor::advice::AdviceMutation; +use miden_processor::event::EventError; +use miden_processor::mast::MastForest; +use miden_processor::{FutureMaybeSend, Host, MastForestStore, ProcessorState}; use miden_protocol::Word; use miden_protocol::account::{AccountDelta, PartialAccount}; use miden_protocol::assembly::debuginfo::Location; use miden_protocol::assembly::{SourceFile, SourceSpan}; -use miden_protocol::transaction::{InputNote, InputNotes, OutputNote}; +use miden_protocol::transaction::{InputNote, InputNotes, RawOutputNote}; +use miden_protocol::vm::{EventId, EventName}; use crate::host::{RecipientData, ScriptMastForestStore, TransactionBaseHost, TransactionEvent}; use crate::{AccountProcedureIndexMap, TransactionKernelError}; -/// The transaction prover host is responsible for handling [`SyncHost`] requests made by the +/// The transaction prover host is responsible for handling [`Host`] requests made by the /// transaction kernel during proving. pub struct TransactionProverHost<'store, STORE> where @@ -59,7 +55,7 @@ where // -------------------------------------------------------------------------------------------- /// Consumes `self` and returns the account delta, input and output notes. 
- pub fn into_parts(self) -> (AccountDelta, InputNotes, Vec) { + pub fn into_parts(self) -> (AccountDelta, InputNotes, Vec) { self.base_host.into_parts() } } @@ -67,7 +63,7 @@ where // HOST IMPLEMENTATION // ================================================================================================ -impl BaseHost for TransactionProverHost<'_, STORE> +impl Host for TransactionProverHost<'_, STORE> where STORE: MastForestStore, { @@ -80,17 +76,33 @@ where // is only used to improve error message quality which we shouldn't run into here. (SourceSpan::UNKNOWN, None) } + + fn get_mast_forest(&self, node_digest: &Word) -> impl FutureMaybeSend>> { + let result = self.base_host.get_mast_forest(node_digest); + async move { result } + } + + fn on_event( + &mut self, + process: &ProcessorState, + ) -> impl FutureMaybeSend, EventError>> { + let result = self.on_event_sync(process); + async move { result } + } + + fn resolve_event(&self, event_id: EventId) -> Option<&EventName> { + self.base_host.resolve_event(event_id) + } } -impl SyncHost for TransactionProverHost<'_, STORE> +impl TransactionProverHost<'_, STORE> where STORE: MastForestStore, { - fn get_mast_forest(&self, node_digest: &Word) -> Option> { - self.base_host.get_mast_forest(node_digest) - } - - fn on_event(&mut self, process: &ProcessState) -> Result, EventError> { + fn on_event_sync( + &mut self, + process: &ProcessorState, + ) -> Result, EventError> { if let Some(advice_mutations) = self.base_host.handle_core_lib_events(process)? 
{ return Ok(advice_mutations); } diff --git a/deny.toml b/deny.toml index 3679142018..29b7c11c46 100644 --- a/deny.toml +++ b/deny.toml @@ -12,6 +12,7 @@ ignore = [ "RUSTSEC-2024-0436", # paste is unmaintained but no alternative available "RUSTSEC-2025-0055", # tracing-subscriber vulnerability - will be fixed by upgrade "RUSTSEC-2025-0056", # adler is unmaintained but used by miniz_oxide + "RUSTSEC-2025-0141", # bincode is unmaintained, replace with wincode (https://github.com/0xMiden/miden-vm/issues/2550) ] yanked = "warn" @@ -22,7 +23,7 @@ allow = [ "Apache-2.0", "BSD-2-Clause", "BSD-3-Clause", - "ISC", + "CC0-1.0", "MIT", "Unicode-3.0", "Zlib", @@ -44,6 +45,9 @@ highlight = "all" multiple-versions = "deny" skip = [ #{ name = "ansi_term", version = "=0.11.0" }, + # Allow duplicate rand versions - miden-field uses 0.10, miden-vm uses 0.9 + { name = "rand" }, + { name = "rand_core" }, ] skip-tree = [ # Allow getrandom v0.2.x - legacy version used by nanorand @@ -54,14 +58,11 @@ skip-tree = [ { name = "rustc_version", version = "=0.2.*" }, # Allow unicode-width v0.1.x - used by miden-formatting vs textwrap conflict { name = "unicode-width", version = "=0.1.*" }, - # Allow windows-targets v0.48.x - older Windows target version - { name = "windows-targets", version = "=0.48.*" }, - # Allow windows-sys v0.48.x/v0.59.x - multiple Windows system libraries - { name = "windows-sys", version = "=0.48.*" }, - { name = "windows-sys", version = "=0.59.*" }, # Allow syn v1.x and v2.x - our derive macros need v1.x while ecosystem uses v2.x { name = "syn", version = "=1.0.109" }, - { name = "syn", version = "=2.0.111" }, + { name = "syn", version = "=2.0.117" }, + # Allow spin v0.9.x - legacy version used by some dependencies + { name = "spin", version = "=0.9.*" }, ] wildcards = "allow" diff --git a/docs/src/account/address.md b/docs/src/account/address.md index fe979e730e..9ceda4fa58 100644 --- a/docs/src/account/address.md +++ b/docs/src/account/address.md @@ -107,8 +107,8 @@ 
The encryption key routing parameter enables secure note payload encryption by a The supported **encryption schemes** are: - `X25519_XChaCha20Poly1305`: Curve25519-based key exchange with XChaCha20-Poly1305 authenticated encryption - `K256_XChaCha20Poly1305`: secp256k1-based key exchange with XChaCha20-Poly1305 authenticated encryption -- `X25519_AeadRpo`: Curve25519-based key exchange with RPO-based authenticated encryption -- `K256_AeadRpo`: secp256k1-based key exchange with RPO-based authenticated encryption +- `X25519_AeadPoseidon2`: Curve25519-based key exchange with Poseidon2-based authenticated encryption +- `K256_AeadPoseidon2`: secp256k1-based key exchange with Poseidon2-based authenticated encryption The encryption key is optional in an address. If not provided, senders may use alternative encryption mechanisms or send unencrypted notes. diff --git a/docs/src/account/components.md b/docs/src/account/components.md index 423e37d97b..4adcbcb6e3 100644 --- a/docs/src/account/components.md +++ b/docs/src/account/components.md @@ -7,7 +7,7 @@ title: "Components" Account components are reusable units of functionality that define a part of an account's code and storage. Multiple account components can be merged together to form an account's final [code](./code) and [storage](./storage). -As an example, consider a typical wallet account, capable of holding a user's assets and requiring authentication whenever assets are added or removed. Such an account can be created by merging a `BasicWallet` component with an `Falcon512Rpo` authentication component. The basic wallet does not need any storage, but contains the code to move assets in and out of the account vault. The authentication component holds a user's public key in storage and additionally contains the code to verify a signature against that public key. Together, these components form a fully functional wallet account. 
+As an example, consider a typical wallet account, capable of holding a user's assets and requiring authentication whenever assets are added or removed. Such an account can be created by merging a `BasicWallet` component with a `Falcon512Poseidon2` authentication component. The basic wallet does not need any storage, but contains the code to move assets in and out of the account vault. The authentication component holds a user's public key in storage and additionally contains the code to verify a signature against that public key. Together, these components form a fully functional wallet account. ## Account Component schemas @@ -98,7 +98,7 @@ In TOML, these are declared using dotted array keys: **Value-slot** entries describe their schema via `WordSchema`. A value type can be either: -- **Simple**: defined through the `type = ""` field, indicating the expected `SchemaTypeId` for the entire word. The value is supplied at instantiation time via `InitStorageData`. Felt types are stored as full words in the following layout: `[0, 0, 0, ]`. +- **Simple**: defined through the `type = ""` field, indicating the expected `SchemaType` for the entire word. The value is supplied at instantiation time via `InitStorageData`. Felt types are stored as full words in the following layout: `[0, 0, 0, ]`. - **Composite**: provided through `type = [ ... ]`, which contains exactly four `FeltSchema` descriptors. Each element is either a named typed field (optionally with `default-value`) or a `void` element for reserved/padding zeros. Composite schema entries reuse the existing TOML structure for four-element words, while simple schemas rely on `type`. In our example, the `token_metadata` slot uses a composite schema (`type = [...]`) mixing typed fields (`max_supply`, `decimals`) with defaults (`symbol`) and a reserved/padding `void` element. 
diff --git a/docs/src/asset.md b/docs/src/asset.md index ce0e15ec85..8cd17a0299 100644 --- a/docs/src/asset.md +++ b/docs/src/asset.md @@ -64,6 +64,65 @@ Non-fungible assets are encoded by hashing the `Asset` data into 32 bytes and pl Assets in Miden can be burned through various methods, such as rendering them unspendable by storing them in an unconsumable note, or sending them back to their original faucet for burning using it's dedicated function. +### Callbacks + +Asset callbacks allow a faucet to execute custom logic whenever one of its assets is added to an account vault or to an output note. This gives asset issuers a mechanism to enforce policies on their assets. For example, maintaining a block list of accounts that are not allowed to receive the asset or globally pausing transfers of assets. + +#### How callbacks work + +Callbacks involve two parts: a **per-asset flag** and **faucet-level callback procedures**. + +**Per-asset callback flag.** Every asset carries a single-bit callback flag in its vault key. When the flag is `Enabled`, the kernel checks for and invokes callbacks on the issuing faucet whenever the asset is added to a vault or note. When the flag is `Disabled`, callbacks are skipped entirely. This flag is set at asset creation time and the protocol does not prevent issuing assets with different flags from the same faucet. Technically, this gives faucets the ability to issue a callback-enabled and a callback-disabled variant of their assets. + +:::warning +Two assets issued by the same faucet with _different_ callback flags are considered completely different assets by the protocol. +::: + +It is recommended that faucets issue all of their assets with the same flag to ensure all assets issued by a faucet are treated as one type of asset. This is ensured when using `faucet::create_fungible_asset` or `faucet::create_non_fungible_asset`. 
+ +**Faucet callback procedures.** A faucet registers callbacks by storing the procedure root (hash) of one of its public account procedures in a well-known storage slot. Two callbacks are supported: + +| Callback | Storage slot name | Triggered when | |---|---|---| | `on_before_asset_added_to_account` | `miden::protocol::faucet::callback::on_before_asset_added_to_account` | The asset is added to an account's vault (via `native_account::add_asset`). | | `on_before_asset_added_to_note` | `miden::protocol::faucet::callback::on_before_asset_added_to_note` | The asset is added to an output note (via `output_note::add_asset`). | + +Account components that need to add callbacks to an account's storage should use the `AssetCallbacks` type, which provides an easy-to-use abstraction over these details. + +#### Callback interfaces + +The transaction kernel invokes the callback on the issuing faucet and the callback receives the asset key and value and is expected to return the processed asset value. + +:::warning +At this time, the processed asset value must be the same as the asset value, but in the future this limitation may be lifted. +::: + +The **account callback** receives: + +``` +Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] +Outputs: [PROCESSED_ASSET_VALUE, pad(12)] +``` + +The **note callback** receives the additional `note_idx` identifying which output note the asset is being added to: + +``` +Inputs: [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] +Outputs: [PROCESSED_ASSET_VALUE, pad(12)] +``` + +Both callbacks are invoked via `call`, so they must follow the convention of accepting and returning 16 stack elements (input + padding). + +#### Callback skipping + +A callback is not invoked in any of these cases: + +- The asset's callback flag is `Disabled`. +- The issuing faucet does not have the corresponding callback storage slot. +- The callback storage slot contains the empty word.
+ +This means assets with callbacks enabled can still be used even if the faucet has not (yet) registered a callback procedure. + ## Alternative asset models :::note diff --git a/docs/src/protocol_library.md b/docs/src/protocol_library.md index cdc64aef74..7981bba1d7 100644 --- a/docs/src/protocol_library.md +++ b/docs/src/protocol_library.md @@ -33,20 +33,22 @@ Active account procedures can be used to read from storage, fetch or compute com | Procedure | Description | Context | | -------------------------------- | ----------------------------- | ----------------------------- | -| `get_id` | Returns the ID of the active account.

**Inputs:** `[]`
**Outputs:** `[account_id_prefix, account_id_suffix]` | Any | +| `get_id` | Returns the ID of the active account.

**Inputs:** `[]`
**Outputs:** `[account_id_suffix, account_id_prefix]` | Any | | `get_nonce` | Returns the nonce of the active account. Always returns the initial nonce as it can only be incremented in auth procedures.

**Inputs:** `[]`
**Outputs:** `[nonce]` | Any | | `get_initial_commitment` | Returns the active account commitment at the beginning of the transaction.

**Inputs:** `[]`
**Outputs:** `[INIT_COMMITMENT]` | Any | | `compute_commitment` | Computes and returns the account commitment from account data stored in memory.

**Inputs:** `[]`
**Outputs:** `[ACCOUNT_COMMITMENT]` | Any | | `get_code_commitment` | Gets the account code commitment of the active account.

**Inputs:** `[]`
**Outputs:** `[CODE_COMMITMENT]` | Account | | `get_initial_storage_commitment` | Returns the storage commitment of the active account at the beginning of the transaction.

**Inputs:** `[]`
**Outputs:** `[INIT_STORAGE_COMMITMENT]` | Any | | `compute_storage_commitment` | Computes the latest account storage commitment of the active account.

**Inputs:** `[]`
**Outputs:** `[STORAGE_COMMITMENT]` | Account | -| `get_item` | Gets an item from the account storage.

**Inputs:** `[slot_id_prefix, slot_id_suffix]`
**Outputs:** `[VALUE]` | Account | -| `get_initial_item` | Gets the initial item from the account storage slot as it was at the beginning of the transaction.

**Inputs:** `[slot_id_prefix, slot_id_suffix]`
**Outputs:** `[VALUE]` | Account | -| `get_map_item` | Returns the VALUE located under the specified KEY within the map contained in the given account storage slot.

**Inputs:** `[slot_id_prefix, slot_id_suffix, KEY]`
**Outputs:** `[VALUE]` | Account | -| `get_initial_map_item` | Gets the initial VALUE from the account storage map as it was at the beginning of the transaction.

**Inputs:** `[slot_id_prefix, slot_id_suffix, KEY]`
**Outputs:** `[VALUE]` | Account | -| `get_balance` | Returns the balance of the fungible asset associated with the provided faucet_id in the active account's vault.

**Inputs:** `[faucet_id_prefix, faucet_id_suffix]`
**Outputs:** `[balance]` | Any | -| `get_initial_balance` | Returns the balance of the fungible asset associated with the provided faucet_id in the active account's vault at the beginning of the transaction.

**Inputs:** `[faucet_id_prefix, faucet_id_suffix]`
**Outputs:** `[init_balance]` | Any | -| `has_non_fungible_asset` | Returns a boolean indicating whether the non-fungible asset is present in the active account's vault.

**Inputs:** `[ASSET]`
**Outputs:** `[has_asset]` | Any | +| `get_item` | Gets an item from the account storage.

**Inputs:** `[slot_id_suffix, slot_id_prefix]`
**Outputs:** `[VALUE]` | Account | +| `get_initial_item` | Gets the initial item from the account storage slot as it was at the beginning of the transaction.

**Inputs:** `[slot_id_suffix, slot_id_prefix]`
**Outputs:** `[VALUE]` | Account | +| `get_map_item` | Returns the VALUE located under the specified KEY within the map contained in the given account storage slot.

**Inputs:** `[slot_id_suffix, slot_id_prefix, KEY]`
**Outputs:** `[VALUE]` | Account | +| `get_initial_map_item` | Gets the initial VALUE from the account storage map as it was at the beginning of the transaction.

**Inputs:** `[slot_id_suffix, slot_id_prefix, KEY]`
**Outputs:** `[VALUE]` | Account | +| `get_asset` | Returns the asset associated with the provided asset vault key in the active account's vault.

**Inputs:** `[ASSET_KEY]`
**Outputs:** `[ASSET_VALUE]` | Any | +| `get_initial_asset` | Returns the asset associated with the provided asset vault key in the active account's vault at the beginning of the transaction.

**Inputs:** `[ASSET_KEY]`
**Outputs:** `[ASSET_VALUE]` | Any | +| `get_balance` | Returns the balance of the fungible asset associated with the provided faucet_id in the active account's vault.

**Inputs:** `[faucet_id_suffix, faucet_id_prefix]`
**Outputs:** `[balance]` | Any | +| `get_initial_balance` | Returns the balance of the fungible asset associated with the provided faucet_id in the active account's vault at the beginning of the transaction.

**Inputs:** `[faucet_id_suffix, faucet_id_prefix]`
**Outputs:** `[init_balance]` | Any | +| `has_non_fungible_asset` | Returns a boolean indicating whether the non-fungible asset is present in the active account's vault.

**Inputs:** `[ASSET_VALUE]`
**Outputs:** `[has_asset]` | Any | | `get_initial_vault_root` | Returns the vault root of the active account at the beginning of the transaction.

**Inputs:** `[]`
**Outputs:** `[INIT_VAULT_ROOT]` | Any | | `get_vault_root` | Returns the vault root of the active account.

**Inputs:** `[]`
**Outputs:** `[VAULT_ROOT]` | Any | | `get_num_procedures` | Returns the number of procedures in the active account.

**Inputs:** `[]`
**Outputs:** `[num_procedures]` | Any | @@ -59,13 +61,13 @@ Native account procedures can be used to write to storage, add or remove assets | Procedure | Description | Context | | ------------------------------ | ------------------------------ | ------------------------------ | -| `get_id` | Returns the ID of the native account of the transaction.

**Inputs:** `[]`
**Outputs:** `[account_id_prefix, account_id_suffix]` | Any | +| `get_id` | Returns the ID of the native account of the transaction.

**Inputs:** `[]`
**Outputs:** `[account_id_suffix, account_id_prefix]` | Any | | `incr_nonce` | Increments the nonce of the native account by one and returns the new nonce. Can only be called from auth procedures.

**Inputs:** `[]`
**Outputs:** `[final_nonce]` | Auth | | `compute_delta_commitment` | Computes the commitment to the native account's delta. Can only be called from auth procedures.

**Inputs:** `[]`
**Outputs:** `[DELTA_COMMITMENT]` | Auth | -| `set_item` | Sets an item in the native account storage.

**Inputs:** `[slot_id_prefix, slot_id_suffix, VALUE]`
**Outputs:** `[OLD_VALUE]` | Native & Account | -| `set_map_item` | Sets VALUE under the specified KEY within the map contained in the given native account storage slot.

**Inputs:** `[slot_id_prefix, slot_id_suffix, KEY, VALUE]`
**Outputs:** `[OLD_VALUE]` | Native & Account | -| `add_asset` | Adds the specified asset to the vault. For fungible assets, returns the total after addition.

**Inputs:** `[ASSET]`
**Outputs:** `[ASSET']` | Native & Account | -| `remove_asset` | Removes the specified asset from the vault.

**Inputs:** `[ASSET]`
**Outputs:** `[ASSET]` | Native & Account | +| `set_item` | Sets an item in the native account storage.

**Inputs:** `[slot_id_suffix, slot_id_prefix, VALUE]`
**Outputs:** `[OLD_VALUE]` | Native & Account | +| `set_map_item` | Sets VALUE under the specified KEY within the map contained in the given native account storage slot.

**Inputs:** `[slot_id_suffix, slot_id_prefix, KEY, VALUE]`
**Outputs:** `[OLD_VALUE]` | Native & Account | +| `add_asset` | Adds the specified asset to the vault. For fungible assets, returns the total after addition.

**Inputs:** `[ASSET_KEY, ASSET_VALUE]`
**Outputs:** `[ASSET_VALUE']` | Native & Account | +| `remove_asset` | Removes the specified asset from the vault and returns the remaining asset value.

**Inputs:** `[ASSET_KEY, ASSET_VALUE]`
**Outputs:** `[REMAINING_ASSET_VALUE]` | Native & Account | | `was_procedure_called` | Returns 1 if a native account procedure was called during transaction execution, and 0 otherwise.

**Inputs:** `[PROC_ROOT]`
**Outputs:** `[was_called]` | Any | ## Active Note Procedures (`miden::protocol::active_note`) @@ -78,7 +80,7 @@ Active note procedures can be used to fetch data from the note that is currently | `get_recipient` | Returns the [recipient](note.md#note-recipient-restricting-consumption) of the active note.

**Inputs:** `[]`
**Outputs:** `[RECIPIENT]` | Note | | `get_storage` | Writes the note's [inputs](note.md#inputs) to the specified memory address.

**Inputs:** `[dest_ptr]`
**Outputs:** `[num_storage_items, dest_ptr]` | Note | | `get_metadata` | Returns the [metadata](note.md#metadata) of the active note.

**Inputs:** `[]`
**Outputs:** `[METADATA]` | Note | -| `get_sender` | Returns the sender of the active note.

**Inputs:** `[]`
**Outputs:** `[sender_id_prefix, sender_id_suffix]` | Note | +| `get_sender` | Returns the sender of the active note.

**Inputs:** `[]`
**Outputs:** `[sender_id_suffix, sender_id_prefix]` | Note | | `get_serial_number` | Returns the [serial number](note.md#serial-number) of the active note.

**Inputs:** `[]`
**Outputs:** `[SERIAL_NUMBER]` | Note | | `get_script_root` | Returns the [script root](note.md#script) of the active note.

**Inputs:** `[]`
**Outputs:** `[SCRIPT_ROOT]` | Note | @@ -92,7 +94,7 @@ Input note procedures can be used to fetch data on input notes consumed by the t | `get_assets` | Writes the [assets](note.md#assets) of the input note with the specified index into memory starting at the specified address.

**Inputs:** `[dest_ptr, note_index]`
**Outputs:** `[num_assets, dest_ptr, note_index]` | Any | | `get_recipient` | Returns the [recipient](note.md#note-recipient-restricting-consumption) of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[RECIPIENT]` | Any | | `get_metadata` | Returns the [metadata](note.md#metadata) of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[METADATA]` | Any | -| `get_sender` | Returns the sender of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[sender_id_prefix, sender_id_suffix]` | Any | +| `get_sender` | Returns the sender of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[sender_id_suffix, sender_id_prefix]` | Any | | `get_storage_info` | Returns the [inputs](note.md#inputs) commitment and length of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[NOTE_STORAGE_COMMITMENT, num_storage_items]` | Any | | `get_script_root` | Returns the [script root](note.md#script) of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[SCRIPT_ROOT]` | Any | | `get_serial_number` | Returns the [serial number](note.md#serial-number) of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[SERIAL_NUMBER]` | Any | @@ -106,7 +108,7 @@ Output note procedures can be used to fetch data on output notes created by the | `create` | Creates a new output note and returns its index.

**Inputs:** `[tag, note_type, RECIPIENT]`
**Outputs:** `[note_idx]` | Native & Account | | `get_assets_info` | Returns the information about assets in the output note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[ASSETS_COMMITMENT, num_assets]` | Any | | `get_assets` | Writes the assets of the output note with the specified index into memory starting at the specified address.

**Inputs:** `[dest_ptr, note_index]`
**Outputs:** `[num_assets, dest_ptr, note_index]` | Any | -| `add_asset` | Adds the `ASSET` to the output note specified by the index.

**Inputs:** `[ASSET, note_idx]`
**Outputs:** `[]` | Native | +| `add_asset` | Adds the asset to the output note specified by the index.

**Inputs:** `[ASSET_KEY, ASSET_VALUE, note_idx]`
**Outputs:** `[]` | Native | | `set_attachment` | Sets the attachment of the note specified by the index.

If attachment_kind == Array, there must be an advice map entry for ATTACHMENT.

**Inputs:**
`Operand Stack: [note_idx, attachment_scheme, attachment_kind, ATTACHMENT]`
`Advice map: { ATTACHMENT?: [[ATTACHMENT_ELEMENTS]] }`
**Outputs:** `[]` | Native | | `set_array_attachment` | Sets the attachment of the note specified by the note index to the provided ATTACHMENT which commits to an array of felts.

**Inputs:**
`Operand Stack: [note_idx, attachment_scheme, ATTACHMENT]`
`Advice map: { ATTACHMENT: [[ATTACHMENT_ELEMENTS]] }`
**Outputs:** `[]` | Native | | `set_word_attachment` | Sets the attachment of the note specified by the note index to the provided word.

**Inputs:** `[note_idx, attachment_scheme, ATTACHMENT]`
**Outputs:** `[]` | @@ -123,7 +125,7 @@ Note utility procedures can be used to compute the required utility data or writ | `write_assets_to_memory` | Writes the assets data stored in the advice map to the memory specified by the provided destination pointer.

**Inputs:** `[ASSETS_COMMITMENT, num_assets, dest_ptr]`
**Outputs:** `[num_assets, dest_ptr]` | Any | | `build_recipient_hash` | Returns the `RECIPIENT` for a specified `SERIAL_NUM`, `SCRIPT_ROOT`, and storage commitment.

**Inputs:** `[SERIAL_NUM, SCRIPT_ROOT, STORAGE_COMMITMENT]`
**Outputs:** `[RECIPIENT]` | Any | | `build_recipient` | Builds the recipient hash from note storage, script root, and serial number.

**Inputs:** `[storage_ptr, num_storage_items, SERIAL_NUM, SCRIPT_ROOT]`
**Outputs:** `[RECIPIENT]` | Any | -| `extract_sender_from_metadata` | Extracts the sender ID from the provided metadata word.

**Inputs:** `[METADATA]`
**Outputs:** `[sender_id_prefix, sender_id_suffix]` | Any | +| `extract_sender_from_metadata` | Extracts the sender ID from the provided metadata word.

**Inputs:** `[METADATA]`
**Outputs:** `[sender_id_suffix, sender_id_prefix]` | Any | ## Transaction Procedures (`miden::protocol::tx`) @@ -138,7 +140,7 @@ Transaction procedures manage transaction-level operations including note creati | `get_output_notes_commitment` | Returns the output notes commitment hash.

**Inputs:** `[]`
**Outputs:** `[OUTPUT_NOTES_COMMITMENT]` | Any | | `get_num_input_notes` | Returns the total number of input notes consumed by this transaction.

**Inputs:** `[]`
**Outputs:** `[num_input_notes]` | Any | | `get_num_output_notes` | Returns the current number of output notes created in this transaction.

**Inputs:** `[]`
**Outputs:** `[num_output_notes]` | Any | -| `execute_foreign_procedure` | Executes the provided procedure against the foreign account.

**Inputs:** `[foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, , pad(n)]`
**Outputs:** `[]` | Any | +| `execute_foreign_procedure` | Executes the provided procedure against the foreign account.

**Inputs:** `[foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, , pad(n)]`
**Outputs:** `[]` | Any | | `get_expiration_block_delta` | Returns the transaction expiration delta, or 0 if not set.

**Inputs:** `[]`
**Outputs:** `[block_height_delta]` | Any | | `update_expiration_block_delta` | Updates the transaction expiration delta.

**Inputs:** `[block_height_delta]`
**Outputs:** `[]` | Any | @@ -148,10 +150,11 @@ Faucet procedures allow reading and writing to faucet accounts to mint and burn | Procedure | Description | Context | | ------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------- | -| `create_fungible_asset` | Creates a fungible asset for the faucet the transaction is being executed against.

**Inputs:** `[amount]`
**Outputs:** `[ASSET]` | Faucet | -| `create_non_fungible_asset` | Creates a non-fungible asset for the faucet the transaction is being executed against.

**Inputs:** `[DATA_HASH]`
**Outputs:** `[ASSET]` | Faucet | -| `mint` | Mint an asset from the faucet the transaction is being executed against.

**Inputs:** `[ASSET]`
**Outputs:** `[ASSET]` | Native & Account & Faucet | -| `burn` | Burn an asset from the faucet the transaction is being executed against.

**Inputs:** `[ASSET]`
**Outputs:** `[ASSET]` | Native & Account & Faucet | +| `create_fungible_asset` | Creates a fungible asset for the faucet the transaction is being executed against.

**Inputs:** `[amount]`
**Outputs:** `[ASSET_KEY, ASSET_VALUE]` | Faucet | +| `create_non_fungible_asset` | Creates a non-fungible asset for the faucet the transaction is being executed against.

**Inputs:** `[DATA_HASH]`
**Outputs:** `[ASSET_KEY, ASSET_VALUE]` | Faucet | +| `mint` | Mint an asset from the faucet the transaction is being executed against.

**Inputs:** `[ASSET_KEY, ASSET_VALUE]`
**Outputs:** `[NEW_ASSET_VALUE]` | Native & Account & Faucet | +| `burn` | Burn an asset from the faucet the transaction is being executed against.

**Inputs:** `[ASSET_KEY, ASSET_VALUE]`
**Outputs:** `[]` | Native & Account & Faucet | +| `has_callbacks` | Returns whether the active account defines callbacks.

**Inputs:** `[]`
**Outputs:** `[has_callbacks]` | Any | ## Asset Procedures (`miden::protocol::asset`) @@ -159,5 +162,5 @@ Asset procedures provide utilities for creating fungible and non-fungible assets | Procedure | Description | Context | | -------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------- | -| `build_fungible_asset` | Builds a fungible asset for the specified fungible faucet and amount.

**Inputs:** `[faucet_id_prefix, faucet_id_suffix, amount]`
**Outputs:** `[ASSET]` | Any | -| `build_non_fungible_asset` | Builds a non-fungible asset for the specified non-fungible faucet and data hash.

**Inputs:** `[faucet_id_prefix, DATA_HASH]`
**Outputs:** `[ASSET]` | Any | +| `create_fungible_asset` | Builds a fungible asset for the specified fungible faucet and amount.

**Inputs:** `[enable_callbacks, faucet_id_suffix, faucet_id_prefix, amount]`
**Outputs:** `[ASSET_KEY, ASSET_VALUE]` | Any | +| `create_non_fungible_asset` | Builds a non-fungible asset for the specified non-fungible faucet and data hash.

**Inputs:** `[faucet_id_suffix, faucet_id_prefix, DATA_HASH]`
**Outputs:** `[ASSET_KEY, ASSET_VALUE]` | Any | diff --git a/docs/src/transaction.md b/docs/src/transaction.md index 74ecf8f4f9..06f4c83f0b 100644 --- a/docs/src/transaction.md +++ b/docs/src/transaction.md @@ -66,7 +66,7 @@ To illustrate the `Transaction` protocol, we provide two examples for a basic `T Let's assume account A wants to create a P2ID note. P2ID notes are pay-to-ID notes that can only be consumed by a specified target account ID. Note creators can provide the target account ID using the [note storage](note#inputs). -In this example, account A uses the basic wallet and the authentication component provided by `miden-standards`. The basic wallet component defines the methods `wallets::basic::create_note` and `wallets::basic::move_asset_to_note` to create notes with assets, and `wallets::basic::receive_asset` to receive assets. The authentication component exposes `auth::basic::auth_tx_falcon512_rpo` which allows for signing a transaction. Some account methods like `active_account::get_id` are always exposed. +In this example, account A uses the basic wallet and the single-sig authentication component provided by `miden-standards`. The basic wallet component defines the methods `wallets::basic::create_note` and `wallets::basic::move_asset_to_note` to create notes with assets, and `wallets::basic::receive_asset` to receive assets. The authentication component exposes `auth::singlesig::auth_tx` which allows for signing a transaction. Some account methods like `active_account::get_id` are always exposed. The executor inputs to the Miden VM a `Transaction` script in which it places on the stack the data (tag, aux, note_type, execution_hint, RECIPIENT) of the note(s) that it wants to create using `wallets::basic::create_note` during the said `Transaction`. 
The [`NoteRecipient`](https://github.com/0xMiden/protocol/blob/next/crates/miden-protocol/src/note/recipient.rs) is a value that describes under which condition a note can be consumed and is built using a `serial_number`, the `note_script` (in this case P2ID script) and the `note_inputs`. The Miden VM will execute the `Transaction` script and create the note(s). After having been created, the executor can use `wallets::basic::move_asset_to_note` to move assets from the account's vault to the notes vault. @@ -84,7 +84,7 @@ Then the P2ID note script is being executed. The script starts by reading the no If the check passes, the note script pushes the assets it holds into the account's vault. For every asset the note contains, the script calls the `wallets::basic::receive_asset` method exposed by the account's wallet component. The `wallets::basic::receive_asset` procedure calls `native_account::add_asset`, which cannot be called from the note itself. This allows accounts to control what functionality to expose, e.g. whether the account supports receiving assets or not, and the note cannot bypass that. -After the assets are stored in the account's vault, the transaction script is being executed. The script calls `auth::basic::auth_tx_falcon512_rpo` which is explicitly exposed in the account interface. The method is used to verify a provided signature against a public key stored in the account's storage and a commitment to this specific transaction. If the signature can be verified, the method increments the nonce. +After the assets are stored in the account's vault, the transaction script is being executed. The script calls `auth::singlesig::auth_tx` which is explicitly exposed in the account interface. The method is used to verify a provided signature against a public key stored in the account's storage and a commitment to this specific transaction. If the signature can be verified, the method increments the nonce. 
The Epilogue finalizes the transaction by computing the final account hash, asserting the nonce increment and checking that no assets were created or destroyed in the transaction — that means the net sum of all assets must stay the same.