Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
190 changes: 39 additions & 151 deletions .github/workflows/on-pull-request.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,184 +6,72 @@ on:
env:
CARGO_TERM_COLOR: always
NF4_LARGE_BLOCK_TEST: "true"

jobs:
build:
name: Check + Lint
runs-on: self-hosted
environment: DEV
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
# Add Node so npx exists for OZ upgrades validation

# Foundry install
- name: Install Foundry (forge, cast) - needed by build.rs
uses: foundry-rs/foundry-toolchain@v1
with:
version: nightly

- name: Verify forge
run: |
which forge
forge --version

- name: Install Rust nightly with rustfmt & clippy
shell: bash
run: |
rustup toolchain install nightly
rustup +nightly component add rustfmt clippy

- name: Setup Node.js (for upgrades-core validator)
uses: actions/setup-node@v4
with:
node-version: 20

- name: Install upgrades-core globally and verify npx
shell: bash
run: |
npm i -g @openzeppelin/upgrades-core@^1.37.0
which node && node -v
which npm && npm -v
which npx && npx --version
shell: bash
- name: Export PATH for child processes
run: echo "PATH=$PATH" >> $GITHUB_ENV
node -v
npm -v
npx --version

- name: Run cargo fmt
run: cargo +nightly fmt

- name: Run clippy
run: cargo clippy --all-targets -- -D warnings
run: cargo +nightly clippy --all-targets -- -D warnings

- name: Generate keys
run: |
cargo build
cargo run --release --bin key_generation

- name: Run test
run: cargo test
resync:
name: Synchronisation Test with Mock Prover
needs: [build]
runs-on: self-hosted
env:
NF4_MOCK_PROVER: "true"
NF4_CONTRACTS__DEPLOY_CONTRACTS: "true"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Clear previous run
run: docker compose --profile sync_test down -v
- name: Build
run: cargo build
- name: Generate keys
run: cargo run --release --bin key_generation
- name: Build Docker Image
run: docker compose --profile sync_test build
- name: Run Integration Test
run: docker compose --profile development run test
- name: Print logs
run: docker compose --profile development logs
- name: Remove databases
run: docker compose --profile development rm -fvs client client2 proposer db_client db_client2 db_proposer
- name: Shut Down Containers
run: |
docker compose --profile sync_test down -v

rollup-prover:
name: Rollup prover integration Test
timeout-minutes: 480
needs: [build, resync] # This test is long so start it after we're sure the others have passed
runs-on: self-hosted
env:
NF4_MOCK_PROVER: "false"
NF4_CONTRACTS__DEPLOY_CONTRACTS: "true"
steps:
- name: Checkout
uses: actions/checkout@v4
- name: link pptau cache
# This is a workaround to avoid downloading the ppot_26.ptau file every time. It creates a symlink to a cached file (which will be downloaded if it doesn't exist on the testrunner).
run: |
if [ -e /home/testrunner/ptau-cache/ppot_26.ptau ]
then
echo "Using cached ppot_26.ptau"
else
echo "Downloading ppot file to cache"
mkdir -p /home/testrunner/ptau-cache
wget https://pse-trusted-setup-ppot.s3.eu-central-1.amazonaws.com/pot28_0080/ppot_0080_26.ptau
mv ppot_0080_26.ptau /home/testrunner/ptau-cache/ppot_26.ptau
chmod 444 ptau-cache/ppot_26.ptau # Make it read-only
echo "Cached ppot_26.ptau created"
fi
ln -sf /home/testrunner/ptau-cache/ppot_26.ptau /home/testrunner/actions-runner/_work/nightfall_4_PV/nightfall_4_PV/configuration/bin/ppot_26.ptau
- name: link bn254 setup cache
shell: bash
run: |
set -euo pipefail
# Version the cache so you can invalidate by bumping NF4_SRS_VERSION
VERSION_SUFFIX="${NF4_SRS_VERSION:-v1}"
CACHE_DIR="/home/testrunner/srs-cache"
CACHE_FILE="$CACHE_DIR/bn254_setup_26.${VERSION_SUFFIX}.cache"
TARGET_DIR="$GITHUB_WORKSPACE/configuration/bin"
TARGET="$TARGET_DIR/bn254_setup_26.cache"
mkdir -p "$CACHE_DIR" "$TARGET_DIR"
# Prevent two concurrent workflows from trampling the cache
LOCKFILE="$CACHE_DIR/.bn254_setup.lock"
exec 9>"$LOCKFILE"
flock -w 600 9 || (echo "Failed to acquire cache lock" >&2; exit 1)
if [ -e "$CACHE_FILE" ]; then
echo "Using cached bn254_setup_26.cache at $CACHE_FILE"
ln -sf "$CACHE_FILE" "$TARGET"
ls -l "$TARGET"
else
echo "No cached bn254_setup_26.cache found. Generating once, then caching…"
# Ensure keygen is available
cargo build
# Prefer a narrow generation if supported; fall back to full keygen
if ! NF4_MOCK_PROVER=false cargo run --release --bin key_generation -- --only-bn254-setup; then
NF4_MOCK_PROVER=false cargo run --release --bin key_generation
fi
# Find the generated file (expecting TARGET); fallback to a search
if [ ! -e "$TARGET" ]; then
FOUND="$(find "$GITHUB_WORKSPACE" -maxdepth 5 -type f -name 'bn254_setup_26.cache' | head -n1 || true)"
if [ -z "$FOUND" ]; then
echo "ERROR: Could not locate bn254_setup_26.cache after key generation." >&2
exit 1
fi
ln -sf "$FOUND" "$TARGET"
fi
# Populate persistent cache and relink from there (immutable)
cp -f "$TARGET" "$CACHE_FILE"
chmod 444 "$CACHE_FILE"
ln -sf "$CACHE_FILE" "$TARGET"
echo "Cached and linked:"
ls -l "$CACHE_FILE" "$TARGET"
fi
- name: Clear previous run
run: docker compose --profile sync_test down -v
- name: Build
run: cargo build
- name: Generate keys
run: NF4_MOCK_PROVER=false cargo run --release --bin key_generation
- name: Hydrate setup files for Docker build (dereference symlinks)
shell: bash
run: |
set -euo pipefail
BIN_DIR="$GITHUB_WORKSPACE/configuration/bin"
mkdir -p "$BIN_DIR"
hydrate() {
local f="$1"
local p="$BIN_DIR/$f"
if [ ! -e "$p" ]; then
echo "ERROR: Expected artifact missing: $p" >&2
exit 1
fi
if [ -L "$p" ]; then
echo "Dereferencing symlink: $p"
# Copy the symlink's target contents into the workspace file
cp -fL "$p" "$p.tmp"
chmod 444 "$p.tmp"
mv -f "$p.tmp" "$p"
else
echo "Already a regular file: $p"
fi
ls -l "$p"
}
hydrate "ppot_26.ptau"
hydrate "bn254_setup_26.cache"
- name: Build Docker Image
run: docker compose --profile sync_test build
- name: Run Integration Test
run: docker compose --profile development run test
- name: Print logs
run: docker compose --profile development logs
- name: Shut Down Containers
run: |
docker compose --profile sync_test down -v

forge:
name: Smart Contract Unit Tests
needs: [build]
runs-on: self-hosted
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4

# Same action here (jobs do not share state)
- name: Install Foundry (forge, cast)
uses: foundry-rs/foundry-toolchain@v1
with:
version: nightly

- name: Run Forge Tests
run: forge test
run: forge test
58 changes: 58 additions & 0 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
# Contributing to nightfall_4_Starknet

Thanks for contributing! 🎉
This repository is a **fork** of `nightfall_4_CE`, but all development happens **here**.

Please follow the steps below to avoid accidentally opening PRs against the upstream repo.

---

## Important: Where to open Pull Requests

Although this repository is forked from `nightfall_4_CE`, **all pull requests must target this repository**:

**`EYBlockchain/nightfall_4_Starknet`**

Do **NOT** open PRs against `nightfall_4_CE` unless explicitly requested.

---

## How to open a Pull Request (recommended workflow)

### 1. Create a feature branch
```bash
git checkout -b <your-name>/<short-description>
```

### 2. Push your branch to this repo
Make sure your `origin` remote points to `nightfall_4_Starknet`.
```bash
git push -u origin HEAD
```

### 3. Open the Pull Request in this repo
Open this link in your browser and select your branch:
```text
https://github.com/EYBlockchain/nightfall_4_Starknet/compare
```

## PR quality & review process

- **Small, focused PRs (single concern).** Split large work into reviewable chunks. Prefer multiple PRs over one mega-PR.

- **PRs must be self-contained and well described.** Include:
- a clear title,
- brief context (why this change is needed),
- a comprehensive description of what changed and how it was validated (tests, screenshots/logs, etc.).

- **Reviews are collaborative.** Expect a detailed back-and-forth to reach a high-quality outcome.

- **Authors should not resolve review comments themselves.**
Comments should be resolved by the **reviewer** once they are satisfied with the fix or clarification. This ensures feedback is validated and closed with reviewer consent.

- **Respond to each review comment with a fixup commit link.**
When addressing feedback, reply to each comment with a link to the specific fixup commit that implements the change. This lets reviewers verify updates incrementally.

- **Squash fixups before merge.**
After review is complete, squash fixup commits into the main branch history prior to merge (keep the final history clean).

2 changes: 1 addition & 1 deletion lib/src/hex_conversion.rs
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ impl HexConvertible for Vec<u8> {

fn from_hex_string(hex_str: &str) -> Result<Vec<u8>, HexError> {
let s_int = hex_str.strip_prefix("0x").unwrap_or(hex_str);
if s_int.len() % 2 != 0 || s_int.is_empty() {
if !s_int.len().is_multiple_of(2) || s_int.is_empty() {
return Err(HexError::InvalidStringLength);
}
hex::decode(s_int).map_err(|_| HexError::InvalidString)
Expand Down
2 changes: 1 addition & 1 deletion lib/src/merkle_trees/append_only.rs
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ where
let sub_tree_capacity = pow2_usize(metadata.sub_tree_height).ok_or_else(|| {
Self::Error::Error("sub_tree_capacity too large to compute capacity safely".to_string())
})?;
if leaves.len() % sub_tree_capacity != 0 {
if !leaves.len().is_multiple_of(sub_tree_capacity) {
return Err(Self::Error::IncorrectBatchSize);
}
if leaves.is_empty() {
Expand Down
4 changes: 2 additions & 2 deletions lib/src/merkle_trees/mutable.rs
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,7 @@ where
let sub_tree_capacity = pow2_usize(metadata.sub_tree_height).ok_or_else(|| {
Self::Error::Error("sub_tree_height too large to compute sub tree capacity".to_string())
})?;
if leaves.len() % sub_tree_capacity != 0 {
if !leaves.len().is_multiple_of(sub_tree_capacity) {
return Err(Self::Error::IncorrectBatchSize);
}
if leaves.is_empty() {
Expand Down Expand Up @@ -812,7 +812,7 @@ where
let sub_tree_leaf_capacity = pow2_usize(metadata.sub_tree_height).ok_or_else(|| {
Self::Error::Error("sub_tree_height too large to compute capacity safely".to_string())
})?;
if commitments.len() % sub_tree_leaf_capacity != 0 {
if !commitments.len().is_multiple_of(sub_tree_leaf_capacity) {
return Err(Self::Error::IncorrectBatchSize);
}
// call insert circuit for each sub_tree
Expand Down
2 changes: 1 addition & 1 deletion lib/src/merkle_trees/trees.rs
Original file line number Diff line number Diff line change
Expand Up @@ -435,7 +435,7 @@ pub(crate) mod helper_functions {
let mut pow1: usize = 2;
let mut pow2: usize = pow1 << 1;
while index == 0 {
if (leaf_index + 1 - pow1) % pow2 == 0 {
if (leaf_index + 1 - pow1).is_multiple_of(pow2) {
index = exp1;
} else {
pow1 = pow2;
Expand Down
2 changes: 1 addition & 1 deletion lib/src/serialization.rs
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ where
.map_err(serde::de::Error::custom)?;
Ok(fr)
}

#[allow(dead_code)]
struct FrWrapper(Fr);

impl Serialize for FrWrapper {
Expand Down
10 changes: 5 additions & 5 deletions lib/src/validate_keys.rs
Original file line number Diff line number Diff line change
Expand Up @@ -196,17 +196,17 @@ async fn handle_keys_validation(params: FormParams) -> Result<impl Reply, warp::
error!(
"Verification failed - keys from the config server are incorrect. Response: {resp:?}"
);
return Err(warp::reject::custom(KeyVerificationError::new(
Err(warp::reject::custom(KeyVerificationError::new(
"Verification failed - keys from the config server are incorrect",
)));
)))
} else if download_status && !onchain_status {
error!("Verification failed - on-chain decider VK does not match regenerated local VK. Response: {resp:?}");
return Err(warp::reject::custom(KeyVerificationError::new(
Err(warp::reject::custom(KeyVerificationError::new(
"Verification failed - on-chain decider VK does not match regenerated local VK",
)));
)))
} else {
error!("Verification failed - keys from the config server and onchain decider vk are incorrect. Response: {resp:?}.");
return Err(warp::reject::custom(KeyVerificationError::new("Verification failed - keys from the config server and onchain decider vk are incorrect")));
Err(warp::reject::custom(KeyVerificationError::new("Verification failed - keys from the config server and onchain decider vk are incorrect")))
}
}

Expand Down