diff --git a/.changeset/unlucky-avocados-sell.md b/.changeset/unlucky-avocados-sell.md new file mode 100644 index 000000000..46f7f626c --- /dev/null +++ b/.changeset/unlucky-avocados-sell.md @@ -0,0 +1,5 @@ +--- +"ponder": patch +--- + +Added trace level logs for rpc requests. diff --git a/.changeset/warm-cats-march.md b/.changeset/warm-cats-march.md new file mode 100644 index 000000000..302241c9a --- /dev/null +++ b/.changeset/warm-cats-march.md @@ -0,0 +1,8 @@ +--- +"create-ponder": minor +"@ponder/client": minor +"@ponder/react": minor +"ponder": minor +--- + +Released `0.9`. Visit the [migration guide](https://ponder.sh/docs/migration-guide) for details. diff --git a/examples/feature-multichain/ponder.config.ts b/examples/feature-multichain/ponder.config.ts index b66ded766..8399f9a7c 100644 --- a/examples/feature-multichain/ponder.config.ts +++ b/examples/feature-multichain/ponder.config.ts @@ -20,23 +20,28 @@ export default createConfig({ networks: { mainnet: { chainId: 1, - transport: http(process.env.PONDER_RPC_URL_1), - pollingInterval: 15_000, + rpcUrl: [process.env.PONDER_RPC_URL_1!, process.env.PONDER_RPC_URL_1_WS!], }, base: { chainId: 8453, - transport: http(process.env.PONDER_RPC_URL_8453), - pollingInterval: 15_000, + rpcUrl: [ + process.env.PONDER_RPC_URL_8453!, + process.env.PONDER_RPC_URL_8453_WS!, + ], }, optimism: { chainId: 10, - transport: http(process.env.PONDER_RPC_URL_10), - pollingInterval: 15_000, + rpcUrl: [ + process.env.PONDER_RPC_URL_10!, + process.env.PONDER_RPC_URL_10_WS!, + ], }, polygon: { chainId: 137, - transport: http(process.env.PONDER_RPC_URL_137), - pollingInterval: 15_000, + rpcUrl: [ + process.env.PONDER_RPC_URL_137!, + process.env.PONDER_RPC_URL_137_WS!, + ], }, }, contracts: { diff --git a/packages/core/src/_test/e2e/erc20/erc20.test.ts b/packages/core/src/_test/e2e/erc20/erc20.test.ts index 082e9067a..e73da60fb 100644 --- a/packages/core/src/_test/e2e/erc20/erc20.test.ts +++ b/packages/core/src/_test/e2e/erc20/erc20.test.ts @@ -55,13 +55,11 @@ test( amount: parseEther("1"), sender: ALICE, }); - await waitForIndexedBlock({ port, networkName: "mainnet", block: { number: 2 }, }); - const result = await client.db.select().from(schema.account); expect(result[0]).toMatchObject({ diff --git a/packages/core/src/_test/e2e/erc20/ponder.config.ts b/packages/core/src/_test/e2e/erc20/ponder.config.ts index 1cd895f64..d6e2be545 100644 --- a/packages/core/src/_test/e2e/erc20/ponder.config.ts +++ b/packages/core/src/_test/e2e/erc20/ponder.config.ts @@ -1,4 +1,3 @@ -import { http } from "viem"; import { createConfig } from "../../../config/index.js"; import { erc20ABI } from "../../generated.js"; @@ -20,7 +19,7 @@ export default createConfig({ networks: { mainnet: { chainId: 1, - transport: http(`http://127.0.0.1:8545/${poolId}`), + rpcUrl: `http://127.0.0.1:8545/${poolId}`, }, }, contracts: { diff --git a/packages/core/src/_test/e2e/factory/ponder.config.ts b/packages/core/src/_test/e2e/factory/ponder.config.ts index 550163bc8..38b93a78f 100644 --- a/packages/core/src/_test/e2e/factory/ponder.config.ts +++ b/packages/core/src/_test/e2e/factory/ponder.config.ts @@ -1,5 +1,5 @@ import { factory } from "@/config/address.js"; -import { http, getAbiItem } from "viem"; +import { getAbiItem } from "viem"; import { createConfig } from "../../../config/index.js"; import { factoryABI, pairABI } from "../../generated.js"; @@ -21,7 +21,7 @@ export default createConfig({ networks: { mainnet: { chainId: 1, - transport: http(`http://127.0.0.1:8545/${poolId}`), + rpcUrl: 
`http://127.0.0.1:8545/${poolId}`, }, }, contracts: { diff --git a/packages/core/src/_test/utils.ts b/packages/core/src/_test/utils.ts index c2973f29a..c12c76e1f 100644 --- a/packages/core/src/_test/utils.ts +++ b/packages/core/src/_test/utils.ts @@ -1,8 +1,8 @@ import { type AddressInfo, createServer } from "node:net"; import { factory } from "@/config/address.js"; import { createConfig } from "@/config/index.js"; -import type { Network, Status } from "@/internal/types.js"; -import type { Address, Chain } from "viem"; +import type { Chain, Status } from "@/internal/types.js"; +import type { Address, Chain as ViemChain } from "viem"; import { http, createPublicClient, createTestClient, getAbiItem } from "viem"; import { mainnet } from "viem/chains"; import { erc20ABI, factoryABI, pairABI } from "./generated.js"; @@ -26,7 +26,7 @@ export const anvil = { webSocket: [`ws://127.0.0.1:8545/${poolId}`], }, }, -} as const satisfies Chain; +} as const satisfies ViemChain; export const testClient = createTestClient({ chain: anvil, @@ -56,8 +56,8 @@ export const getErc20ConfigAndIndexingFunctions = (params: { }, contracts: { Erc20: { - abi: erc20ABI, network: "mainnet", + abi: erc20ABI, address: params.address, includeCallTraces: params.includeCallTraces, includeTransactionReceipts: params.includeTransactionReceipts, @@ -171,19 +171,17 @@ export const getAccountsConfigAndIndexingFunctions = (params: { return { config, rawIndexingFunctions }; }; -export const getNetwork = (params?: { +export const getChain = (params?: { finalityBlockCount?: number; }) => { return { - name: "mainnet", - chainId: 1, chain: anvil, - transport: http(`http://127.0.0.1:8545/${poolId}`)({ chain: anvil }), + rpcUrl: `http://127.0.0.1:8545/${poolId}`, maxRequestsPerSecond: 50, pollingInterval: 1_000, finalityBlockCount: params?.finalityBlockCount ?? 
1, disableCache: false, - } satisfies Network; + } satisfies Chain; }; export function getFreePort(): Promise { diff --git a/packages/core/src/bin/commands/dev.ts b/packages/core/src/bin/commands/dev.ts index e78610f52..2e4d93ffc 100644 --- a/packages/core/src/bin/commands/dev.ts +++ b/packages/core/src/bin/commands/dev.ts @@ -224,6 +224,7 @@ export async function dev({ cliOptions }: { cliOptions: CliOptions }) { indexingCleanupReloadable = await run({ common, database, + preBuild, schemaBuild, indexingBuild: indexingBuildResult.result, onFatalError: () => { diff --git a/packages/core/src/bin/commands/start.ts b/packages/core/src/bin/commands/start.ts index 7a310923e..dd5d783cd 100644 --- a/packages/core/src/bin/commands/start.ts +++ b/packages/core/src/bin/commands/start.ts @@ -154,6 +154,7 @@ export async function start({ cliOptions }: { cliOptions: CliOptions }) { cleanupReloadable = await run({ common, database, + preBuild, schemaBuild, indexingBuild: indexingBuildResult.result, onFatalError: () => { diff --git a/packages/core/src/bin/utils/run.test.ts b/packages/core/src/bin/utils/run.test.ts index a11a26f0c..5d9e06bca 100644 --- a/packages/core/src/bin/utils/run.test.ts +++ b/packages/core/src/bin/utils/run.test.ts @@ -6,7 +6,7 @@ import { } from "@/_test/setup.js"; import { deployErc20 } from "@/_test/simulate.js"; import { getErc20ConfigAndIndexingFunctions } from "@/_test/utils.js"; -import { getNetwork } from "@/_test/utils.js"; +import { getChain } from "@/_test/utils.js"; import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; import { buildSchema } from "@/build/schema.js"; import { createDatabase } from "@/database/index.js"; @@ -29,7 +29,7 @@ const account = onchainTable("account", (p) => ({ const schema = { account }; test("run() setup", async (context) => { - const network = getNetwork(); + const chain = getChain(); const { address } = await deployErc20({ sender: ALICE }); @@ -57,7 +57,7 @@ test("run() setup", async (context) => { const indexingBuild: IndexingBuild = { buildId: "buildId", - networks: [network], + chains: [chain], sources, indexingFunctions, }; @@ -78,9 +78,13 @@ test("run() setup", async (context) => { const kill = await run({ common: context.common, - database, + preBuild: { + databaseConfig: context.databaseConfig, + mode: "multichain", + }, schemaBuild, indexingBuild, + database, onFatalError: vi.fn(), onReloadableError: vi.fn(), }); @@ -93,7 +97,7 @@ test("run() setup", async (context) => { }); test("run() setup error", async (context) => { - const network = getNetwork(); + const chain = getChain(); const { address } = await deployErc20({ sender: ALICE }); @@ -122,7 +126,7 @@ test("run() setup error", async (context) => { const indexingBuild: IndexingBuild = { buildId: "buildId", - networks: [network], + chains: [chain], sources, indexingFunctions, }; @@ -145,9 +149,13 @@ test("run() setup error", async (context) => { const kill = await run({ common: context.common, - database, + preBuild: { + databaseConfig: context.databaseConfig, + mode: "multichain", + }, schemaBuild, indexingBuild, + database, onFatalError: vi.fn(), onReloadableError: () => { onReloadableErrorPromiseResolver.resolve(); diff --git a/packages/core/src/bin/utils/run.ts b/packages/core/src/bin/utils/run.ts index eb406da0c..afc165e28 100644 --- a/packages/core/src/bin/utils/run.ts +++ b/packages/core/src/bin/utils/run.ts @@ -6,15 +6,16 @@ import { createRealtimeIndexingStore } from "@/indexing-store/realtime.js"; import { createIndexingService } from 
"@/indexing/index.js"; import type { Common } from "@/internal/common.js"; import { getAppProgress } from "@/internal/metrics.js"; -import type { Event, IndexingBuild, SchemaBuild } from "@/internal/types.js"; +import type { IndexingBuild, PreBuild, SchemaBuild } from "@/internal/types.js"; +import { createRpc } from "@/rpc/index.js"; import { createSyncStore } from "@/sync-store/index.js"; import { decodeEvents } from "@/sync/events.js"; import { type RealtimeEvent, createSync, splitEvents } from "@/sync/index.js"; import { + ZERO_CHECKPOINT_STRING, decodeCheckpoint, - encodeCheckpoint, - zeroCheckpoint, } from "@/utils/checkpoint.js"; +import { chunk } from "@/utils/chunk.js"; import { formatEta, formatPercentage } from "@/utils/format.js"; import { never } from "@/utils/never.js"; import { createQueue } from "@ponder/common"; @@ -22,6 +23,7 @@ import { createQueue } from "@ponder/common"; /** Starts the sync and indexing services for the specified build. */ export async function run({ common, + preBuild, schemaBuild, indexingBuild, database, @@ -29,6 +31,7 @@ export async function run({ onReloadableError, }: { common: Common; + preBuild: PreBuild; schemaBuild: SchemaBuild; indexingBuild: IndexingBuild; database: Database; @@ -37,6 +40,10 @@ export async function run({ }) { let isKilled = false; + const rpcs = indexingBuild.chains.map((chain) => + createRpc({ chain, common }), + ); + const initialCheckpoint = await database.recoverCheckpoint(); const syncStore = createSyncStore({ common, database }); const metadataStore = getMetadataStore({ database }); @@ -47,27 +54,38 @@ export async function run({ runCodegen({ common }); - // Note: can throw const sync = await createSync({ common, indexingBuild, + rpcs, syncStore, - // Note: this is not great because it references the - // `realtimeQueue` which isn't defined yet onRealtimeEvent: (realtimeEvent) => { + if (realtimeEvent.type === "reorg") { + realtimeQueue.clear(); + } + return realtimeQueue.add(realtimeEvent); }, onFatalError, initialCheckpoint, + mode: preBuild.mode, }); - const handleEvents = async (events: Event[], checkpoint: string) => { - if (events.length === 0) return { status: "success" } as const; + const indexingService = createIndexingService({ + common, + indexingBuild, + rpcs, + syncStore, + }); - indexingService.updateTotalSeconds(decodeCheckpoint(checkpoint)); + const historicalIndexingStore = createHistoricalIndexingStore({ + common, + schemaBuild, + database, + isDatabaseEmpty: initialCheckpoint === ZERO_CHECKPOINT_STRING, + }); - return await indexingService.processEvents({ events }); - }; + indexingService.setIndexingStore(historicalIndexingStore); const realtimeQueue = createQueue({ initialStart: true, @@ -76,18 +94,69 @@ export async function run({ worker: async (event: RealtimeEvent) => { switch (event.type) { case "block": { - // Events must be run block-by-block, so that `database.complete` can accurately - // update the temporary `checkpoint` value set in the trigger. - for (const { checkpoint, events } of splitEvents(event.events)) { - const result = await handleEvents( - decodeEvents(common, indexingBuild.sources, events), - event.checkpoint, - ); - - if (result.status === "error") onReloadableError(result.error); - - // Set reorg table `checkpoint` column for newly inserted rows. - await database.complete({ checkpoint }); + if (event.events.length > 0) { + // Events must be run block-by-block, so that `database.complete` can accurately + // update the temporary `checkpoint` value set in the trigger. 
+ + const perBlockEvents = splitEvents(event.events); + + common.logger.debug({ + service: "app", + msg: `Partitioned events into ${perBlockEvents.length} blocks`, + }); + + for (const { checkpoint, events } of perBlockEvents) { + const chain = indexingBuild.chains.find( + (chain) => + chain.chain.id === + Number(decodeCheckpoint(checkpoint).chainId), + )!; + + const decodedEvents = decodeEvents( + common, + indexingBuild.sources, + events, + ); + + common.logger.debug({ + service: "app", + msg: `Decoded ${decodedEvents.length} '${chain.chain.name}' events for block ${Number(decodeCheckpoint(checkpoint).blockNumber)}`, + }); + + const result = await indexingService.processEvents({ + events: decodedEvents, + }); + + common.logger.info({ + service: "app", + msg: `Indexed ${decodedEvents.length} '${chain.chain.name}' events for block ${Number(decodeCheckpoint(checkpoint).blockNumber)}`, + }); + + if (result.status === "error") onReloadableError(result.error); + + // Set reorg table `checkpoint` column for newly inserted rows. + await database.complete({ checkpoint }); + + if (preBuild.mode === "multichain") { + const chain = indexingBuild.chains.find( + (chain) => + chain.chain.id === + Number(decodeCheckpoint(checkpoint).chainId), + )!; + + common.metrics.ponder_indexing_timestamp.set( + { network: chain.chain.name }, + decodeCheckpoint(checkpoint).blockTimestamp, + ); + } else { + for (const chain of indexingBuild.chains) { + common.metrics.ponder_indexing_timestamp.set( + { network: chain.chain.name }, + decodeCheckpoint(checkpoint).blockTimestamp, + ); + } + } + } } await metadataStore.setStatus(event.status); @@ -111,29 +180,46 @@ export async function run({ }, }); - const indexingService = createIndexingService({ - common, - indexingBuild, - sync, - }); - - const historicalIndexingStore = createHistoricalIndexingStore({ - common, - schemaBuild, - database, - initialCheckpoint, - }); + await metadataStore.setStatus(sync.getStatus()); - indexingService.setIndexingStore(historicalIndexingStore); + for (const chain of indexingBuild.chains) { + const label = { network: chain.chain.name }; + common.metrics.ponder_historical_total_indexing_seconds.set( + label, + Math.max( + sync.seconds[chain.chain.name]!.end - + sync.seconds[chain.chain.name]!.start, + 0, + ), + ); + common.metrics.ponder_historical_cached_indexing_seconds.set( + label, + Math.max( + sync.seconds[chain.chain.name]!.cached - + sync.seconds[chain.chain.name]!.start, + 0, + ), + ); + common.metrics.ponder_historical_completed_indexing_seconds.set(label, 0); + common.metrics.ponder_indexing_timestamp.set( + label, + Math.max( + sync.seconds[chain.chain.name]!.cached, + sync.seconds[chain.chain.name]!.start, + ), + ); + } - await metadataStore.setStatus(sync.getStatus()); + // Reset the start timestamp so the eta estimate doesn't include + // the startup time. + common.metrics.start_timestamp = Date.now(); const start = async () => { // If the initial checkpoint is zero, we need to run setup events. - if (encodeCheckpoint(zeroCheckpoint) === initialCheckpoint) { + if (initialCheckpoint === ZERO_CHECKPOINT_STRING) { const result = await indexingService.processSetupEvents({ sources: indexingBuild.sources, - networks: indexingBuild.networks, + chains: indexingBuild.chains, }); if (result.status === "killed") { return; @@ -143,81 +229,129 @@ export async function run({ } } - // Track the last processed checkpoint, used to set metrics - let end: string | undefined; let lastFlush = Date.now(); // Run historical indexing until complete. 
- for await (const { events, checkpoint } of sync.getEvents()) { - end = checkpoint; + for await (const events of sync.getEvents()) { + if (isKilled) return; - const decodedEvents = decodeEvents(common, indexingBuild.sources, events); - const result = await handleEvents(decodedEvents, checkpoint); - - // underlying metrics collection is actually synchronous - // https://github.com/siimon/prom-client/blob/master/lib/histogram.js#L102-L125 - const { eta, progress } = await getAppProgress(common.metrics); if (events.length > 0) { + const decodedEvents = decodeEvents( + common, + indexingBuild.sources, + events, + ); + const eventChunks = chunk(decodedEvents, 93); + common.logger.debug({ + service: "app", + msg: `Decoded ${decodedEvents.length} events`, + }); + for (const eventChunk of eventChunks) { + const result = await indexingService.processEvents({ + events: eventChunk, + }); + + if (result.status === "killed") { + return; + } else if (result.status === "error") { + onReloadableError(result.error); + return; + } + + const checkpoint = decodeCheckpoint( + eventChunk[eventChunk.length - 1]!.checkpoint, + ); + + if (preBuild.mode === "multichain") { + const chain = indexingBuild.chains.find( + (chain) => chain.chain.id === Number(checkpoint.chainId), + )!; + common.metrics.ponder_historical_completed_indexing_seconds.set( + { network: chain.chain.name }, + Math.max( + checkpoint.blockTimestamp - + sync.seconds[chain.chain.name]!.start, + 0, + ), + ); + common.metrics.ponder_indexing_timestamp.set( + { network: chain.chain.name }, + checkpoint.blockTimestamp, + ); + } else { + for (const chain of indexingBuild.chains) { + common.metrics.ponder_historical_completed_indexing_seconds.set( + { network: chain.chain.name }, + Math.max( + checkpoint.blockTimestamp - + sync.seconds[chain.chain.name]!.start, + 0, + ), + ); + common.metrics.ponder_indexing_timestamp.set( + { network: chain.chain.name }, + checkpoint.blockTimestamp, + ); + } + } + + // Note: allows for terminal and logs to be updated + await new Promise(setImmediate); + } + + // underlying metrics collection is actually synchronous + // https://github.com/siimon/prom-client/blob/master/lib/histogram.js#L102-L125 + const { eta, progress } = await getAppProgress(common.metrics); if (eta === undefined || progress === undefined) { common.logger.info({ service: "app", - msg: `Indexed ${events.length} events`, + msg: `Indexed ${decodedEvents.length} events`, }); } else { common.logger.info({ service: "app", - msg: `Indexed ${events.length} events with ${formatPercentage(progress)} complete and ${formatEta(eta)} remaining`, + msg: `Indexed ${decodedEvents.length} events with ${formatPercentage(progress)} complete and ${formatEta(eta * 1_000)} remaining`, }); } - } - // Persist the indexing store to the db if it is too full. The `finalized` - // checkpoint is used as a mutex. Any rows in the reorg table that may - // have been written because of raw sql access are deleted. Also must truncate - // the reorg tables that may have been written because of raw sql access. - if ( - (historicalIndexingStore.isCacheFull() && events.length > 0) || - (common.options.command === "dev" && - lastFlush + 5_000 < Date.now() && - events.length > 0) - ) { - if (historicalIndexingStore.isCacheFull()) { - common.logger.debug({ - service: "indexing", - msg: `Indexing cache has exceeded ${common.options.indexingCacheMaxBytes} MB limit, starting flush`, + // Persist the indexing store to the db if it is too full. The `finalized` + // checkpoint is used as a mutex. 
Any rows in the reorg table that may + // have been written because of raw sql access are deleted. Also must truncate + // the reorg tables that may have been written because of raw sql access. + if ( + (historicalIndexingStore.isCacheFull() && events.length > 0) || + (common.options.command === "dev" && + lastFlush + 5_000 < Date.now() && + events.length > 0) + ) { + if (historicalIndexingStore.isCacheFull()) { + common.logger.debug({ + service: "indexing", + msg: `Indexing cache has exceeded ${common.options.indexingCacheMaxBytes} MB limit, starting flush`, + }); + } else { + common.logger.debug({ + service: "indexing", + msg: "Dev server periodic flush triggered, starting flush", + }); + } + + await database.finalize({ checkpoint: ZERO_CHECKPOINT_STRING }); + await historicalIndexingStore.flush(); + await database.complete({ checkpoint: ZERO_CHECKPOINT_STRING }); + await database.finalize({ + checkpoint: events[events.length - 1]!.checkpoint, }); - } else { + lastFlush = Date.now(); + common.logger.debug({ service: "indexing", - msg: "Dev server periodic flush triggered, starting flush", + msg: "Completed flush", }); } - - await database.finalize({ - checkpoint: encodeCheckpoint(zeroCheckpoint), - }); - await historicalIndexingStore.flush(); - await database.complete({ - checkpoint: encodeCheckpoint(zeroCheckpoint), - }); - await database.finalize({ - checkpoint: events[events.length - 1]!.checkpoint, - }); - lastFlush = Date.now(); - - common.logger.debug({ - service: "indexing", - msg: "Completed flush", - }); } await metadataStore.setStatus(sync.getStatus()); - if (result.status === "killed") { - return; - } else if (result.status === "error") { - onReloadableError(result.error); - return; - } } if (isKilled) return; @@ -232,26 +366,30 @@ export async function run({ msg: "Completed all historical events, starting final flush", }); - await database.finalize({ checkpoint: encodeCheckpoint(zeroCheckpoint) }); + await database.finalize({ checkpoint: ZERO_CHECKPOINT_STRING }); await historicalIndexingStore.flush(); - await database.complete({ checkpoint: encodeCheckpoint(zeroCheckpoint) }); + await database.complete({ checkpoint: ZERO_CHECKPOINT_STRING }); await database.finalize({ checkpoint: sync.getFinalizedCheckpoint() }); // Manually update metrics to fix a UI bug that occurs when the end // checkpoint is between the last processed event and the finalized // checkpoint. - const start = sync.getStartCheckpoint(); - common.metrics.ponder_indexing_completed_seconds.set( - decodeCheckpoint(end ?? start).blockTimestamp - - decodeCheckpoint(start).blockTimestamp, - ); - common.metrics.ponder_indexing_total_seconds.set( - decodeCheckpoint(end ?? start).blockTimestamp - - decodeCheckpoint(start).blockTimestamp, - ); - common.metrics.ponder_indexing_completed_timestamp.set( - decodeCheckpoint(end ?? 
start).blockTimestamp, - ); + + for (const chain of indexingBuild.chains) { + const label = { network: chain.chain.name }; + common.metrics.ponder_historical_completed_indexing_seconds.set( + label, + Math.max( + sync.seconds[chain.chain.name]!.end - + sync.seconds[chain.chain.name]!.start, + 0, + ), + ); + common.metrics.ponder_indexing_timestamp.set( + { network: chain.chain.name }, + sync.seconds[chain.chain.name]!.end, + ); + } // Become healthy common.logger.info({ diff --git a/packages/core/src/build/configAndIndexingFunctions.test.ts b/packages/core/src/build/configAndIndexingFunctions.test.ts index bd0b62497..903634879 100644 --- a/packages/core/src/build/configAndIndexingFunctions.test.ts +++ b/packages/core/src/build/configAndIndexingFunctions.test.ts @@ -2,13 +2,13 @@ import { factory } from "@/config/address.js"; import type { LogFactory, LogFilter, TraceFilter } from "@/internal/types.js"; import { shouldGetTransactionReceipt } from "@/sync/filter.js"; import { - http, type Address, parseAbiItem, toEventSelector, toFunctionSelector, zeroAddress, } from "viem"; +import { mainnet } from "viem/chains"; import { expect, test } from "vitest"; import { type Config, createConfig } from "../config/index.js"; import { @@ -35,11 +35,14 @@ const bytes2 = test("buildConfigAndIndexingFunctions() builds topics for multiple events", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { - network: { mainnet: {} }, + network: "mainnet", abi: [event0, event1], address: address1, startBlock: 16370000, @@ -65,11 +68,14 @@ test("buildConfigAndIndexingFunctions() builds topics for multiple events", asyn test("buildConfigAndIndexingFunctions() handles overloaded event signatures and combines topics", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { - network: { mainnet: {} }, + network: "mainnet", abi: [event1, event1Overloaded], address: address1, startBlock: 16370000, @@ -95,7 +101,10 @@ test("buildConfigAndIndexingFunctions() handles overloaded event signatures and test("buildConfigAndIndexingFunctions() handles multiple addresses", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { @@ -128,8 +137,14 @@ test("buildConfigAndIndexingFunctions() handles multiple addresses", async () => test("buildConfigAndIndexingFunctions() creates a source for each network for multi-network contracts", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, - optimism: { chainId: 10, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, + optimism: { + chainId: 10, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { @@ -150,11 +165,14 @@ test("buildConfigAndIndexingFunctions() creates a source for each network for mu test("buildConfigAndIndexingFunctions() builds topics for event filter", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { - network: { mainnet: {} }, + 
network: "mainnet", abi: [event0], filter: { event: "Event0", @@ -184,11 +202,14 @@ test("buildConfigAndIndexingFunctions() builds topics for event filter", async ( test("buildConfigAndIndexingFunctions() builds topics for multiple event filters", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { - network: { mainnet: {} }, + network: "mainnet", abi: [event0, event1Overloaded], filter: [ { @@ -234,7 +255,10 @@ test("buildConfigAndIndexingFunctions() builds topics for multiple event filters test("buildConfigAndIndexingFunctions() overrides default values with network-specific values", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { @@ -262,7 +286,10 @@ test("buildConfigAndIndexingFunctions() overrides default values with network-sp test("buildConfigAndIndexingFunctions() handles network name shortcut", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { @@ -280,18 +307,21 @@ test("buildConfigAndIndexingFunctions() handles network name shortcut", async () rawIndexingFunctions: [{ name: "a:Event0", fn: () => {} }], }); - expect(sources[0]!.network.name).toBe("mainnet"); + expect(sources[0]!.chain.chain.id).toBe(mainnet.id); }); test("buildConfigAndIndexingFunctions() validates network name", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { // @ts-expect-error - network: "mainnetz", + network: "base", abi: [event0], address: address1, }, @@ -312,7 +342,10 @@ test("buildConfigAndIndexingFunctions() validates network name", async () => { test("buildConfigAndIndexingFunctions() warns for public RPC URL", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("https://cloudflare-eth.com") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { @@ -340,7 +373,10 @@ test("buildConfigAndIndexingFunctions() warns for public RPC URL", async () => { test("buildConfigAndIndexingFunctions() validates event filter event name must be present in ABI", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("https://cloudflare-eth.com") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { @@ -371,7 +407,10 @@ test("buildConfigAndIndexingFunctions() validates event filter event name must b test("buildConfigAndIndexingFunctions() validates address empty string", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("https://cloudflare-eth.com") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { @@ -396,7 +435,10 @@ test("buildConfigAndIndexingFunctions() validates address empty string", async ( test("buildConfigAndIndexingFunctions() validates address prefix", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("https://cloudflare-eth.com") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: 
{ a: { @@ -422,7 +464,10 @@ test("buildConfigAndIndexingFunctions() validates address prefix", async () => { test("buildConfigAndIndexingFunctions() validates address length", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("https://cloudflare-eth.com") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { @@ -447,11 +492,14 @@ test("buildConfigAndIndexingFunctions() validates address length", async () => { test("buildConfigAndIndexingFunctions() coerces NaN startBlock to undefined", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { - network: { mainnet: {} }, + network: "mainnet", abi: [event0, event1], startBlock: Number.NaN, }, @@ -469,8 +517,14 @@ test("buildConfigAndIndexingFunctions() coerces NaN startBlock to undefined", as test("buildConfigAndIndexingFunctions() includeTransactionReceipts", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, - optimism: { chainId: 10, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, + optimism: { + chainId: 10, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { @@ -495,8 +549,14 @@ test("buildConfigAndIndexingFunctions() includeTransactionReceipts", async () => test("buildConfigAndIndexingFunctions() includeCallTraces", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, - optimism: { chainId: 10, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, + optimism: { + chainId: 10, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { @@ -531,8 +591,14 @@ test("buildConfigAndIndexingFunctions() includeCallTraces", async () => { test("buildConfigAndIndexingFunctions() includeCallTraces with factory", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, - optimism: { chainId: 10, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, + optimism: { + chainId: 10, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { @@ -571,11 +637,14 @@ test("buildConfigAndIndexingFunctions() includeCallTraces with factory", async ( test("buildConfigAndIndexingFunctions() coerces NaN endBlock to undefined", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, contracts: { a: { - network: { mainnet: {} }, + network: "mainnet", abi: [event0, event1], endBlock: Number.NaN, }, @@ -593,11 +662,14 @@ test("buildConfigAndIndexingFunctions() coerces NaN endBlock to undefined", asyn test("buildConfigAndIndexingFunctions() account source", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, accounts: { a: { - network: { mainnet: {} }, + network: "mainnet", address: address1, startBlock: 16370000, endBlock: 16370020, @@ -615,8 +687,8 @@ test("buildConfigAndIndexingFunctions() account source", async () => { expect(sources).toHaveLength(2); - 
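The `sources[i].chain.chain.id` assertions in these tests reflect the internal rename from `Network` to `Chain`: a source now holds a wrapper around a viem chain plus RPC options rather than a `{ name, chainId, transport }` record. A rough sketch of that wrapper, inferred from `getChain` and the config builder elsewhere in this diff (the exact field set is an assumption; the canonical type lives in `@/internal/types.js`):

```ts
import type { Chain as ViemChain } from "viem";

// Inferred from `getChain` in _test/utils.ts and buildConfigAndIndexingFunctions.
type ChainSketch = {
  chain: ViemChain; // viem chain object, with `name` overridden by the network name
  rpcUrl: string | string[]; // from the new `rpcUrl` config field
  pollingInterval: number; // default 1_000
  maxRequestsPerSecond: number; // default 50
  disableCache: boolean; // default false
  finalityBlockCount: number; // derived per chain (see @/utils/finality.js)
};
```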
expect(sources[0]?.network.name).toBe("mainnet"); - expect(sources[1]?.network.name).toBe("mainnet"); + expect(sources[0]?.chain.chain.id).toBe(mainnet.id); + expect(sources[1]?.chain.chain.id).toBe(mainnet.id); expect(sources[0]?.name).toBe("a"); expect(sources[1]?.name).toBe("a"); @@ -634,11 +706,14 @@ test("buildConfigAndIndexingFunctions() account source", async () => { test("buildConfigAndIndexingFunctions() block source", async () => { const config = createConfig({ networks: { - mainnet: { chainId: 1, transport: http("http://127.0.0.1:8545") }, + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, }, blocks: { a: { - network: { mainnet: {} }, + network: "mainnet", startBlock: 16370000, endBlock: 16370020, }, @@ -652,7 +727,7 @@ test("buildConfigAndIndexingFunctions() block source", async () => { expect(sources).toHaveLength(1); - expect(sources[0]?.network.name).toBe("mainnet"); + expect(sources[0]?.chain.chain.id).toBe(mainnet.id); expect(sources[0]?.name).toBe("a"); expect(sources[0]?.filter.type).toBe("block"); // @ts-ignore diff --git a/packages/core/src/build/configAndIndexingFunctions.ts b/packages/core/src/build/configAndIndexingFunctions.ts index 8bc128f83..813215ea2 100644 --- a/packages/core/src/build/configAndIndexingFunctions.ts +++ b/packages/core/src/build/configAndIndexingFunctions.ts @@ -1,16 +1,11 @@ import type { Config } from "@/config/index.js"; -import { - getFinalityBlockCount, - getRpcUrlsForClient, - isRpcUrlPublic, -} from "@/config/networks.js"; import { BuildError } from "@/internal/errors.js"; import type { AccountSource, BlockSource, + Chain, ContractSource, IndexingFunctions, - Network, RawIndexingFunctions, Source, } from "@/internal/types.js"; @@ -23,10 +18,11 @@ import { defaultTransactionReceiptInclude, defaultTransferFilterInclude, } from "@/sync/filter.js"; -import { chains } from "@/utils/chains.js"; +import { getFinalityBlockCount } from "@/utils/finality.js"; import { toLowerCase } from "@/utils/lowercase.js"; import { dedupe } from "@ponder/common"; -import type { Hex, LogTopic } from "viem"; +import { type Hex, type LogTopic, defineChain, extractChain } from "viem"; +import * as viemChains from "viem/chains"; import { buildLogFactory } from "./factory.js"; const flattenSources = < @@ -66,54 +62,60 @@ export async function buildConfigAndIndexingFunctions({ config: Config; rawIndexingFunctions: RawIndexingFunctions; }): Promise<{ - networks: Network[]; + chains: Chain[]; sources: Source[]; indexingFunctions: IndexingFunctions; logs: { level: "warn" | "info" | "debug"; msg: string }[]; }> { const logs: { level: "warn" | "info" | "debug"; msg: string }[] = []; - const networks: Network[] = await Promise.all( - Object.entries(config.networks).map(async ([networkName, network]) => { - const { chainId, transport } = network; - - const defaultChain = - Object.values(chains).find((c) => - "id" in c ? c.id === chainId : false, - ) ?? chains.mainnet!; - const chain = { ...defaultChain, name: networkName, id: chainId }; - - // Note: This can throw. - const rpcUrls = await getRpcUrlsForClient({ transport, chain }); - rpcUrls.forEach((rpcUrl) => { - if (isRpcUrlPublic(rpcUrl)) { - logs.push({ - level: "warn", - msg: `Network '${networkName}' is using a public RPC URL (${rpcUrl}). Most apps require an RPC URL with a higher rate limit.`, - }); - } - }); + const chains = Object.entries(config.networks).map( + ([networkName, network]) => { + const rpcUrl = network.rpcUrl ?? 
network.transport; - if ( - network.pollingInterval !== undefined && - network.pollingInterval! < 100 - ) { + // if (isRpcUrlPublic(rpcUrl)) { + // logs.push({ + // level: "warn", + // msg: `Network '${networkName}' is using a public RPC URL (${rpcUrl}). Most apps require an RPC URL with a higher rate limit.`, + // }); + // } + + if ((network.pollingInterval ?? 1_000) < 100) { throw new Error( `Invalid 'pollingInterval' for network '${networkName}. Expected 100 milliseconds or greater, got ${network.pollingInterval} milliseconds.`, ); } + let chain = extractChain({ + // @ts-ignore + chains: Object.values(viemChains), + id: network.chainId, + }); + + if (chain === undefined) { + chain = defineChain({ + id: network.chainId, + name: networkName, + nativeCurrency: { + decimals: 18, + name: "Ether", + symbol: "ETH", + }, + rpcUrls: { default: { http: [] } }, + }); + } + + chain.name = networkName; + return { - name: networkName, - chainId, chain, - transport: network.transport({ chain }), - maxRequestsPerSecond: network.maxRequestsPerSecond ?? 50, + rpcUrl, pollingInterval: network.pollingInterval ?? 1_000, - finalityBlockCount: getFinalityBlockCount({ chainId }), + maxRequestsPerSecond: network.maxRequestsPerSecond ?? 50, disableCache: network.disableCache ?? false, - } satisfies Network; - }), + finalityBlockCount: getFinalityBlockCount({ chain }), + } satisfies Chain; + }, ); const sourceNames = new Set(); @@ -209,10 +211,10 @@ export async function buildConfigAndIndexingFunctions({ ...flattenSources(config.accounts ?? {}), ...flattenSources(config.blocks ?? {}), ]) { - if (source.network === null || source.network === undefined) { + if (source.network === undefined) { throw new Error( - `Validation failed: Network for '${source.name}' is null or undefined. Expected one of [${networks - .map((n) => `'${n.name}'`) + `Validation failed: Network for '${source.name}' is undefined. Expected one of [${chains + .map((c) => `'${c.chain.name}'`) .join(", ")}].`, ); } @@ -236,13 +238,13 @@ export async function buildConfigAndIndexingFunctions({ ); } - const network = networks.find((n) => n.name === source.network); - if (!network) { + const chain = chains.find((c) => c.chain.name === source.network); + if (chain === undefined) { throw new Error( `Validation failed: Invalid network for '${ source.name - }'. Got '${source.network}', expected one of [${networks - .map((n) => `'${n.name}'`) + }'. Got '${source.network}', expected one of [${chains + .map((c) => `'${c.chain.name}'`) .join(", ")}].`, ); } @@ -252,7 +254,7 @@ export async function buildConfigAndIndexingFunctions({ config.contracts ?? {}, ) .flatMap((source): ContractSource[] => { - const network = networks.find((n) => n.name === source.network)!; + const chain = chains.find((c) => c.chain.name === source.network)!; // Get indexing function that were registered for this contract const registeredLogEvents: string[] = []; @@ -397,7 +399,7 @@ export async function buildConfigAndIndexingFunctions({ abiEvents, abiFunctions, name: source.name, - network, + chain, } as const; const resolvedAddress = source?.address; @@ -408,7 +410,7 @@ export async function buildConfigAndIndexingFunctions({ ) { // Note that this can throw. 
const logFactory = buildLogFactory({ - chainId: network.chainId, + chainId: chain.chain.id, ...resolvedAddress, }); @@ -418,7 +420,7 @@ export async function buildConfigAndIndexingFunctions({ ...contractMetadata, filter: { type: "log", - chainId: network.chainId, + chainId: chain.chain.id, address: logFactory, topic0: topics.topic0, topic1: topics.topic1, @@ -442,7 +444,7 @@ export async function buildConfigAndIndexingFunctions({ ...contractMetadata, filter: { type: "trace", - chainId: network.chainId, + chainId: chain.chain.id, fromAddress: undefined, toAddress: logFactory, callType: "CALL", @@ -491,7 +493,7 @@ export async function buildConfigAndIndexingFunctions({ ...contractMetadata, filter: { type: "log", - chainId: network.chainId, + chainId: chain.chain.id, address: validatedAddress, topic0: topics.topic0, topic1: topics.topic1, @@ -515,7 +517,7 @@ export async function buildConfigAndIndexingFunctions({ ...contractMetadata, filter: { type: "trace", - chainId: network.chainId, + chainId: chain.chain.id, fromAddress: undefined, toAddress: Array.isArray(validatedAddress) ? validatedAddress @@ -557,7 +559,7 @@ export async function buildConfigAndIndexingFunctions({ const accountSources: AccountSource[] = flattenSources(config.accounts ?? {}) .flatMap((source): AccountSource[] => { - const network = networks.find((n) => n.name === source.network)!; + const chain = chains.find((c) => c.chain.name === source.network)!; const startBlockMaybeNan = source.startBlock; const fromBlock = Number.isNaN(startBlockMaybeNan) @@ -582,7 +584,7 @@ export async function buildConfigAndIndexingFunctions({ ) { // Note that this can throw. const logFactory = buildLogFactory({ - chainId: network.chainId, + chainId: chain.chain.id, ...resolvedAddress, }); @@ -590,10 +592,10 @@ export async function buildConfigAndIndexingFunctions({ { type: "account", name: source.name, - network, + chain, filter: { type: "transaction", - chainId: network.chainId, + chainId: chain.chain.id, fromAddress: undefined, toAddress: logFactory, includeReverted: false, @@ -605,10 +607,10 @@ export async function buildConfigAndIndexingFunctions({ { type: "account", name: source.name, - network, + chain, filter: { type: "transaction", - chainId: network.chainId, + chainId: chain.chain.id, fromAddress: logFactory, toAddress: undefined, includeReverted: false, @@ -620,10 +622,10 @@ export async function buildConfigAndIndexingFunctions({ { type: "account", name: source.name, - network, + chain, filter: { type: "transfer", - chainId: network.chainId, + chainId: chain.chain.id, fromAddress: undefined, toAddress: logFactory, includeReverted: false, @@ -639,10 +641,10 @@ export async function buildConfigAndIndexingFunctions({ { type: "account", name: source.name, - network, + chain, filter: { type: "transfer", - chainId: network.chainId, + chainId: chain.chain.id, fromAddress: logFactory, toAddress: undefined, includeReverted: false, @@ -684,10 +686,10 @@ export async function buildConfigAndIndexingFunctions({ { type: "account", name: source.name, - network, + chain, filter: { type: "transaction", - chainId: network.chainId, + chainId: chain.chain.id, fromAddress: undefined, toAddress: validatedAddress, includeReverted: false, @@ -699,10 +701,10 @@ export async function buildConfigAndIndexingFunctions({ { type: "account", name: source.name, - network, + chain, filter: { type: "transaction", - chainId: network.chainId, + chainId: chain.chain.id, fromAddress: validatedAddress, toAddress: undefined, includeReverted: false, @@ -714,10 +716,10 @@ 
export async function buildConfigAndIndexingFunctions({ { type: "account", name: source.name, - network, + chain, filter: { type: "transfer", - chainId: network.chainId, + chainId: chain.chain.id, fromAddress: undefined, toAddress: validatedAddress, includeReverted: false, @@ -733,10 +735,10 @@ export async function buildConfigAndIndexingFunctions({ { type: "account", name: source.name, - network, + chain, filter: { type: "transfer", - chainId: network.chainId, + chainId: chain.chain.id, fromAddress: validatedAddress, toAddress: undefined, includeReverted: false, @@ -774,7 +776,7 @@ export async function buildConfigAndIndexingFunctions({ const blockSources: BlockSource[] = flattenSources(config.blocks ?? {}) .map((source) => { - const network = networks.find((n) => n.name === source.network)!; + const chain = chains.find((c) => c.chain.name === source.network)!; const intervalMaybeNan = source.interval ?? 1; const interval = Number.isNaN(intervalMaybeNan) ? 0 : intervalMaybeNan; @@ -797,10 +799,10 @@ export async function buildConfigAndIndexingFunctions({ return { type: "block", name: source.name, - network, + chain, filter: { type: "block", - chainId: network.chainId, + chainId: chain.chain.id, interval: interval, offset: (fromBlock ?? 0) % interval, fromBlock, @@ -823,15 +825,15 @@ export async function buildConfigAndIndexingFunctions({ const sources = [...contractSources, ...accountSources, ...blockSources]; - // Filter out any networks that don't have any sources registered. - const networksWithSources = networks.filter((network) => { + // Filter out any chains that don't have any sources registered. + const chainsWithSources = chains.filter((chain) => { const hasSources = sources.some( - (source) => source.network.name === network.name, + (source) => source.chain.chain.id === chain.chain.id, ); if (!hasSources) { logs.push({ level: "warn", - msg: `No sources registered for network '${network.name}'`, + msg: `No sources registered for network '${chain.chain.name}'`, }); } return hasSources; @@ -844,7 +846,7 @@ export async function buildConfigAndIndexingFunctions({ } return { - networks: networksWithSources, + chains: chainsWithSources, sources, indexingFunctions, logs, @@ -867,7 +869,7 @@ export async function safeBuildConfigAndIndexingFunctions({ return { status: "success", sources: result.sources, - networks: result.networks, + chains: result.chains, indexingFunctions: result.indexingFunctions, logs: result.logs, } as const; diff --git a/packages/core/src/build/index.ts b/packages/core/src/build/index.ts index e63115ea3..f3ad37989 100644 --- a/packages/core/src/build/index.ts +++ b/packages/core/src/build/index.ts @@ -373,6 +373,7 @@ export const createBuild = async ({ status: "success", result: { databaseConfig: preBuild.databaseConfig, + mode: preBuild.mode, }, } as const; }, @@ -433,7 +434,7 @@ export const createBuild = async ({ result: { buildId, sources: buildConfigAndIndexingFunctionsResult.sources, - networks: buildConfigAndIndexingFunctionsResult.networks, + chains: buildConfigAndIndexingFunctionsResult.chains, indexingFunctions: buildConfigAndIndexingFunctionsResult.indexingFunctions, }, @@ -572,7 +573,7 @@ export const createBuild = async ({ ]); viteNodeRunner.moduleCache.invalidateDepTree( glob.sync(indexingPattern, { - ignore: apiPattern, + ignore: glob.sync(apiPattern), }), ); viteNodeRunner.moduleCache.invalidateDepTree(glob.sync(apiPattern)); diff --git a/packages/core/src/build/plugin.ts b/packages/core/src/build/plugin.ts index 5f67bf795..a5d70a01f 100644 --- 
a/packages/core/src/build/plugin.ts +++ b/packages/core/src/build/plugin.ts @@ -20,12 +20,12 @@ const apiModule = () => `import { createPublicClient } from "viem"; const publicClients = {}; -for (const network of globalThis.PONDER_INDEXING_BUILD.networks) { - publicClients[network.chainId] = createPublicClient({ - chain: network.chain, - transport: () => network.transport - }) -} +// for (const chain of globalThis.PONDER_INDEXING_BUILD.chains) { +// publicClients[network.chainId] = createPublicClient({ +// chain: network.chain, +// transport: () => network.transport +// }) +// } export const db = globalThis.PONDER_DATABASE.qb.drizzleReadonly; export { publicClients }; diff --git a/packages/core/src/build/pre.test.ts b/packages/core/src/build/pre.test.ts index ba171ca9f..f497bca13 100644 --- a/packages/core/src/build/pre.test.ts +++ b/packages/core/src/build/pre.test.ts @@ -1,6 +1,5 @@ import path from "node:path"; import type { Options } from "@/internal/options.js"; -import { http } from "viem"; import { expect, test, vi } from "vitest"; import { createConfig } from "../config/index.js"; import { buildPre } from "./pre.js"; @@ -12,7 +11,12 @@ const options = { test("buildPre() database uses pglite by default", () => { const config = createConfig({ - networks: { mainnet: { chainId: 1, transport: http() } }, + networks: { + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, + }, contracts: { a: { network: "mainnet", abi: [] } }, }); @@ -37,7 +41,12 @@ test("buildPre() database uses pglite by default", () => { test("buildPre() database respects custom pglite path", async () => { const config = createConfig({ database: { kind: "pglite", directory: "custom-pglite/directory" }, - networks: { mainnet: { chainId: 1, transport: http() } }, + networks: { + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, + }, contracts: { a: { network: "mainnet", abi: [] } }, }); @@ -54,7 +63,12 @@ test("buildPre() database respects custom pglite path", async () => { test("buildPre() database uses pglite if specified even if DATABASE_URL env var present", async () => { const config = createConfig({ database: { kind: "pglite" }, - networks: { mainnet: { chainId: 1, transport: http() } }, + networks: { + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, + }, contracts: { a: { network: "mainnet", abi: [] } }, }); @@ -73,7 +87,12 @@ test("buildPre() database uses pglite if specified even if DATABASE_URL env var test("buildPre() database uses postgres if DATABASE_URL env var present", async () => { const config = createConfig({ - networks: { mainnet: { chainId: 1, transport: http() } }, + networks: { + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, + }, contracts: { a: { network: "mainnet", abi: [] } }, }); @@ -92,7 +111,12 @@ test("buildPre() database uses postgres if DATABASE_URL env var present", async test("buildPre() database uses postgres if DATABASE_PRIVATE_URL env var present", async () => { const config = createConfig({ - networks: { mainnet: { chainId: 1, transport: http() } }, + networks: { + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, + }, contracts: { a: { network: "mainnet", abi: [] } }, }); @@ -116,7 +140,12 @@ test("buildPre() database uses postgres if DATABASE_PRIVATE_URL env var present" test("buildPre() throws for postgres database with no connection string", async () => { const config = createConfig({ database: { kind: "postgres" }, - networks: { mainnet: { chainId: 1, transport: http() } }, + networks: { + mainnet: { + chainId: 1, + 
rpcUrl: ["https://rpc.com"], + }, + }, contracts: { a: { network: "mainnet", abi: [] } }, }); @@ -138,7 +167,12 @@ test("buildPre() database with postgres uses pool config", async () => { connectionString: "postgres://username@localhost:5432/database", poolConfig: { max: 100 }, }, - networks: { mainnet: { chainId: 1, transport: http() } }, + networks: { + mainnet: { + chainId: 1, + rpcUrl: ["https://rpc.com"], + }, + }, contracts: { a: { network: "mainnet", abi: [] } }, }); diff --git a/packages/core/src/build/pre.ts b/packages/core/src/build/pre.ts index dd005838f..6ae0b5a91 100644 --- a/packages/core/src/build/pre.ts +++ b/packages/core/src/build/pre.ts @@ -18,6 +18,7 @@ export function buildPre({ options: Pick; }): { databaseConfig: DatabaseConfig; + mode: NonNullable; logs: { level: "warn" | "info" | "debug"; msg: string }[]; } { const logs: { level: "warn" | "info" | "debug"; msg: string }[] = []; @@ -112,6 +113,7 @@ export function buildPre({ return { databaseConfig, logs, + mode: config.mode ?? "omnichain", }; } @@ -131,6 +133,7 @@ export function safeBuildPre({ return { status: "success", databaseConfig: result.databaseConfig, + mode: result.mode, logs: result.logs, } as const; } catch (_error) { diff --git a/packages/core/src/config/index.test.ts b/packages/core/src/config/index.test.ts index 05dd94959..89150e74a 100644 --- a/packages/core/src/config/index.test.ts +++ b/packages/core/src/config/index.test.ts @@ -169,6 +169,21 @@ test("createConfig with multiple filters", () => { }); }); +test("createConfig network transport + rpcUrls", () => { + createConfig({ + networks: { + mainnet: { + chainId: 1, + transport: http(), + }, + optimism: { + chainId: 10, + rpcUrl: ["rpc.com"], + }, + }, + }); +}); + test("createConfig network overrides", () => { createConfig({ networks: { diff --git a/packages/core/src/config/index.ts b/packages/core/src/config/index.ts index 3dfd31b10..5b3d3f2dd 100644 --- a/packages/core/src/config/index.ts +++ b/packages/core/src/config/index.ts @@ -5,20 +5,22 @@ import type { AddressConfig } from "./address.js"; import type { GetEventFilter } from "./eventFilter.js"; export type Config = { + database?: DatabaseConfig; + mode?: "omnichain" | "multichain"; networks: { [networkName: string]: NetworkConfig }; contracts: { [contractName: string]: GetContract }; accounts: { [accountName: string]: AccountConfig }; - database?: DatabaseConfig; blocks: { [sourceName: string]: GetBlockFilter; }; }; export type CreateConfigReturnType = { + database?: DatabaseConfig; + mode?: "omnichain" | "multichain"; networks: networks; contracts: contracts; accounts: accounts; - database?: DatabaseConfig; blocks: blocks; }; @@ -28,8 +30,9 @@ export const createConfig = < const accounts = {}, const blocks = {}, >(config: { - database?: DatabaseConfig; // TODO: add jsdoc to these properties. + mode?: "omnichain" | "multichain"; + database?: DatabaseConfig; networks: NetworksConfig>; contracts?: ContractsConfig>; accounts?: AccountsConfig>; @@ -87,28 +90,50 @@ type NetworkConfig = { chainId: network extends { chainId: infer chainId extends number } ? chainId | number : number; - /** A viem `http`, `webSocket`, or `fallback` [Transport](https://viem.sh/docs/clients/transports/http.html). 
- * - * __To avoid rate limiting, include a custom RPC URL.__ Usage: - * - * ```ts - * import { http } from "viem"; - * - * const network = { - * name: "mainnet", - * chainId: 1, - * transport: http("https://eth-mainnet.g.alchemy.com/v2/..."), - * } - * ``` - */ - transport: Transport; /** Polling interval (in ms). Default: `1_000`. */ pollingInterval?: number; /** Maximum number of RPC requests per second. Default: `50`. */ maxRequestsPerSecond?: number; /** Disable RPC request caching. Default: `false`. */ disableCache?: boolean; -}; +} & ( + | { + /** A viem `http`, `webSocket`, or `fallback` [Transport](https://viem.sh/docs/clients/transports/http.html). + * + * __To avoid rate limiting, include a custom RPC URL.__ Usage: + * + * ```ts + * import { http } from "viem"; + * + * const network = { + * name: "mainnet", + * chainId: 1, + * transport: http("https://eth-mainnet.g.alchemy.com/v2/..."), + * } + * ``` + */ + transport: Transport; + rpcUrl?: never; + } + | { + /** A viem `http`, `webSocket`, or `fallback` [Transport](https://viem.sh/docs/clients/transports/http.html). + * + * __To avoid rate limiting, include a custom RPC URL.__ Usage: + * + * ```ts + * import { http } from "viem"; + * + * const network = { + * name: "mainnet", + * chainId: 1, + * transport: http("https://eth-mainnet.g.alchemy.com/v2/..."), + * } + * ``` + */ + transport?: never; + rpcUrl: string | string[]; + } +); type NetworksConfig = {} extends networks ? {} diff --git a/packages/core/src/config/networks.test.ts b/packages/core/src/config/networks.test.ts deleted file mode 100644 index d1e0afe0f..000000000 --- a/packages/core/src/config/networks.test.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { http, fallback, webSocket } from "viem"; -import { mainnet } from "viem/chains"; -import { expect, test } from "vitest"; -import { getRpcUrlsForClient, isRpcUrlPublic } from "./networks.js"; - -test("getRpcUrlsForClient handles default RPC URL", async () => { - const rpcUrls = await getRpcUrlsForClient({ - transport: http(), - chain: mainnet, - }); - - expect(rpcUrls).toMatchObject(["https://cloudflare-eth.com"]); -}); - -test("getRpcUrlsForClient should handle an http transport", async () => { - const rpcUrls = await getRpcUrlsForClient({ - transport: http("http://localhost:8545"), - chain: mainnet, - }); - - expect(rpcUrls).toMatchObject(["http://localhost:8545"]); -}); - -test("getRpcUrlsForClient should handle a websocket transport", async () => { - const rpcUrls = await getRpcUrlsForClient({ - transport: webSocket("wss://localhost:8545"), - chain: mainnet, - }); - - expect(rpcUrls).toMatchObject(["wss://localhost:8545"]); -}); - -test("getRpcUrlsForClient should handle a fallback containing an http transport", async () => { - const rpcUrls = await getRpcUrlsForClient({ - transport: fallback([http("http://localhost:8545")]), - chain: mainnet, - }); - - expect(rpcUrls).toMatchObject(["http://localhost:8545"]); -}); - -test("isPublicRpcUrl returns true for undefined RPC URL", () => { - const isPublic = isRpcUrlPublic(undefined); - - expect(isPublic).toBe(true); -}); - -test("isPublicRpcUrl returns true for Cloudflare public RPC URL", () => { - const isPublic = isRpcUrlPublic("https://cloudflare-eth.com"); - - expect(isPublic).toBe(true); -}); - -test("isPublicRpcUrl returns false for Alchemy RPC URL", () => { - const isPublic = isRpcUrlPublic("https://eth-mainnet.g.alchemy.com/v2/abc"); - - expect(isPublic).toBe(false); -}); diff --git a/packages/core/src/config/networks.ts b/packages/core/src/config/networks.ts 
deleted file mode 100644 index 63e3b503f..000000000 --- a/packages/core/src/config/networks.ts +++ /dev/null @@ -1,136 +0,0 @@ -import { chains } from "@/utils/chains.js"; -import type { Chain, Client, Transport } from "viem"; - -/** - * Returns the number of blocks that must pass before a block is considered final. - * Note that a value of `0` indicates that blocks are considered final immediately. - * - * @param network The network to get the finality block count for. - * @returns The finality block count. - */ -export function getFinalityBlockCount({ chainId }: { chainId: number }) { - let finalityBlockCount: number; - switch (chainId) { - // Mainnet and mainnet testnets. - case 1: - case 3: - case 4: - case 5: - case 42: - case 11155111: - finalityBlockCount = 65; - break; - // Polygon. - case 137: - case 80001: - finalityBlockCount = 200; - break; - // Arbitrum. - case 42161: - case 42170: - case 421611: - case 421613: - finalityBlockCount = 240; - break; - default: - // Assume a 2-second block time, e.g. OP stack chains. - finalityBlockCount = 30; - } - - return finalityBlockCount; -} - -/** - * Returns the list of RPC URLs backing a Transport. - * - * @param transport A viem Transport. - * @returns Array of RPC URLs. - */ -export async function getRpcUrlsForClient(parameters: { - transport: Transport; - chain: Chain; -}) { - // This is how viem converts a Transport into the Client.transport type. - const { config, value } = parameters.transport({ - chain: parameters.chain, - pollingInterval: 4_000, // default viem value - retryCount: 0, - }); - const transport = { ...config, ...value } as Client["transport"]; - - async function getRpcUrlsForTransport(transport: Client["transport"]) { - switch (transport.type) { - case "http": { - return [transport.url ?? parameters.chain.rpcUrls.default.http[0]]; - } - case "webSocket": { - try { - const socket = await transport.getSocket(); - return [socket.url]; - } catch (e) { - const symbol = Object.getOwnPropertySymbols(e).find( - (symbol) => symbol.toString() === "Symbol(kTarget)", - ); - if (!symbol) return []; - const url = (e as any)[symbol]?._url; - if (!url) return []; - return [url.replace(/\/$/, "")]; - } - } - case "fallback": { - // This is how viem converts a TransportConfig into the Client.transport type. - const fallbackTransports = transport.transports.map((t: any) => ({ - ...t.config, - ...t.value, - })) as Client["transport"][]; - - const urls: (string | undefined)[] = []; - for (const fallbackTransport of fallbackTransports) { - urls.push(...(await getRpcUrlsForTransport(fallbackTransport))); - } - - return urls; - } - default: { - // TODO: Consider logging a warning here. This will catch "custom" and unknown transports, - // which we might not want to support. - return []; - } - } - } - - return getRpcUrlsForTransport(transport); -} - -let publicRpcUrls: Set | undefined = undefined; - -/** - * Returns `true` if the RPC URL is found in the list of public RPC URLs - * included in viem/chains. Handles both HTTP and WebSocket RPC URLs. - * - * @param rpcUrl An RPC URL. - * @returns Boolean indicating if the RPC URL is public. - */ -export function isRpcUrlPublic(rpcUrl: string | undefined) { - if (rpcUrl === undefined) return true; - - if (!publicRpcUrls) { - // By default, viem uses `chain.default.{http|webSocket}.[0]` if it exists. 
- publicRpcUrls = Object.values(chains).reduce>((acc, chain) => { - chain.rpcUrls.default.http.forEach((httpRpcUrl) => { - acc.add(httpRpcUrl); - }); - - ( - (chain.rpcUrls.default as unknown as { webSocket?: string[] }) - .webSocket ?? [] - ).forEach((webSocketRpcUrl) => { - acc.add(webSocketRpcUrl); - }); - - return acc; - }, new Set()); - } - - return publicRpcUrls.has(rpcUrl); -} diff --git a/packages/core/src/database/index.test.ts b/packages/core/src/database/index.test.ts index 7e7b700f3..f7c8c9023 100644 --- a/packages/core/src/database/index.test.ts +++ b/packages/core/src/database/index.test.ts @@ -3,9 +3,10 @@ import { buildSchema } from "@/build/schema.js"; import { onchainEnum, onchainTable, primaryKey } from "@/drizzle/onchain.js"; import { createRealtimeIndexingStore } from "@/indexing-store/realtime.js"; import { + type Checkpoint, + MAX_CHECKPOINT_STRING, + ZERO_CHECKPOINT, encodeCheckpoint, - maxCheckpoint, - zeroCheckpoint, } from "@/utils/checkpoint.js"; import { wait } from "@/utils/wait.js"; import { sql } from "drizzle-orm"; @@ -23,8 +24,8 @@ const account = onchainTable("account", (p) => ({ balance: p.bigint(), })); -function createCheckpoint(index: number): string { - return encodeCheckpoint({ ...zeroCheckpoint, blockTimestamp: index }); +function createCheckpoint(checkpoint: Partial): string { + return encodeCheckpoint({ ...ZERO_CHECKPOINT, ...checkpoint }); } test("migrate() succeeds with empty schema", async (context) => { @@ -162,7 +163,10 @@ test("migrate() throws with schema used after waiting for lock", async (context) }, }); await database.migrate({ buildId: "abc" }); - await database.finalize({ checkpoint: createCheckpoint(10) }); + + await database.finalize({ + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 10n }), + }); const databaseTwo = await createDatabase({ common: context.common, @@ -202,7 +206,7 @@ test("migrate() succeeds with crash recovery", async (context) => { await database.migrate({ buildId: "abc" }); await database.finalize({ - checkpoint: createCheckpoint(10), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 10n }), }); await database.unlock(); @@ -253,7 +257,9 @@ test("migrate() succeeds with crash recovery after waiting for lock", async (con }, }); await database.migrate({ buildId: "abc" }); - await database.finalize({ checkpoint: createCheckpoint(10) }); + await database.finalize({ + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 10n }), + }); const databaseTwo = await createDatabase({ common: context.common, @@ -304,7 +310,7 @@ test("recoverCheckpoint() with crash recovery reverts rows", async (context) => .values({ address: zeroAddress, balance: 10n }); await database.complete({ - checkpoint: createCheckpoint(9), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 9n }), }); await indexingStore @@ -312,11 +318,11 @@ test("recoverCheckpoint() with crash recovery reverts rows", async (context) => .values({ address: "0x0000000000000000000000000000000000000001" }); await database.complete({ - checkpoint: createCheckpoint(11), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 11n }), }); await database.finalize({ - checkpoint: createCheckpoint(10), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 10n }), }); await database.unlock(); @@ -337,7 +343,9 @@ test("recoverCheckpoint() with crash recovery reverts rows", async (context) => await databaseTwo.migrate({ buildId: "abc" }); const checkpoint = await databaseTwo.recoverCheckpoint(); - 
expect(checkpoint).toMatchObject(createCheckpoint(10)); + expect(checkpoint).toStrictEqual( + createCheckpoint({ chainId: 1n, blockNumber: 10n }), + ); const rows = await databaseTwo.qb.drizzle .execute(sql`SELECT * from "account"`) @@ -383,7 +391,7 @@ test("recoverCheckpoint() with crash recovery drops indexes and triggers", async await database.migrate({ buildId: "abc" }); await database.finalize({ - checkpoint: createCheckpoint(10), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 10n }), }); await database.createIndexes(); @@ -485,7 +493,7 @@ test("finalize()", async (context) => { .values({ address: zeroAddress, balance: 10n }); await database.complete({ - checkpoint: createCheckpoint(9), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 9n }), }); await indexingStore @@ -497,11 +505,11 @@ test("finalize()", async (context) => { .values({ address: "0x0000000000000000000000000000000000000001" }); await database.complete({ - checkpoint: createCheckpoint(11), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 11n }), }); await database.finalize({ - checkpoint: createCheckpoint(10), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 10n }), }); // reorg tables @@ -519,10 +527,12 @@ test("finalize()", async (context) => { .selectFrom("_ponder_meta") .where("key", "=", "app") .select("value") - .executeTakeFirst(); + .executeTakeFirstOrThrow() + .then(({ value }) => value); - // @ts-ignore - expect(metadata?.value?.checkpoint).toBe(createCheckpoint(10)); + expect(metadata.checkpoint).toStrictEqual( + createCheckpoint({ chainId: 1n, blockNumber: 10n }), + ); await database.kill(); }); @@ -639,7 +649,7 @@ test("createTriggers()", async (context) => { balance: "10", operation: 0, operation_id: 1, - checkpoint: encodeCheckpoint(maxCheckpoint), + checkpoint: MAX_CHECKPOINT_STRING, }, ]); @@ -695,7 +705,7 @@ test("complete()", async (context) => { .values({ address: zeroAddress, balance: 10n }); await database.complete({ - checkpoint: createCheckpoint(10), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 10n }), }); const rows = await database.qb.user @@ -709,7 +719,7 @@ test("complete()", async (context) => { balance: "10", operation: 0, operation_id: 1, - checkpoint: createCheckpoint(10), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 10n }), }, ]); @@ -746,7 +756,7 @@ test("revert()", async (context) => { .values({ address: zeroAddress, balance: 10n }); await database.complete({ - checkpoint: createCheckpoint(9), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 9n }), }); await indexingStore @@ -758,11 +768,11 @@ test("revert()", async (context) => { .values({ address: "0x0000000000000000000000000000000000000001" }); await database.complete({ - checkpoint: createCheckpoint(11), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 11n }), }); await database.revert({ - checkpoint: createCheckpoint(10), + checkpoint: createCheckpoint({ chainId: 1n, blockNumber: 10n }), }); const rows = await database.qb.user diff --git a/packages/core/src/database/index.ts b/packages/core/src/database/index.ts index 039b9471c..109e35822 100644 --- a/packages/core/src/database/index.ts +++ b/packages/core/src/database/index.ts @@ -17,10 +17,9 @@ import { } from "@/sync-store/migrations.js"; import type { Drizzle } from "@/types/db.js"; import { + MAX_CHECKPOINT_STRING, + ZERO_CHECKPOINT_STRING, decodeCheckpoint, - encodeCheckpoint, - maxCheckpoint, - zeroCheckpoint, } from "@/utils/checkpoint.js"; import { formatEta } from 
"@/utils/format.js"; import { createPool, createReadonlyPool } from "@/utils/pg.js"; @@ -91,8 +90,8 @@ export type PonderInternalSchema = { } & { [_: ReturnType[number]["reorg"]]: unknown & { operation_id: number; - checkpoint: string; operation: 0 | 1 | 2; + checkpoint: string; }; }; @@ -127,7 +126,7 @@ export const createDatabase = async ({ }: { common: Common; namespace: NamespaceBuild; - preBuild: PreBuild; + preBuild: Pick; schemaBuild: Omit; }): Promise => { let heartbeatInterval: NodeJS.Timeout | undefined; @@ -160,6 +159,9 @@ export const createDatabase = async ({ await driver.instance.query(`CREATE SCHEMA IF NOT EXISTS "${namespace}"`); await driver.instance.query(`SET search_path TO "${namespace}"`); + await driver.instance.query(`CREATE SCHEMA IF NOT EXISTS "${namespace}"`); + await driver.instance.query(`SET search_path TO "${namespace}"`); + qb = { internal: new Kysely({ dialect: kyselyDialect, @@ -340,20 +342,15 @@ export const createDatabase = async ({ // Helpers //////// - /** - * Undo operations in user tables by using the "reorg" tables. - * - * Note: "reorg" tables may contain operations that have not been applied to the - * underlying tables, but only be 1 operation at most. - */ + /** Undo operations in user tables by using the "reorg" tables. */ const revert = async ({ + tx, tableName, checkpoint, - tx, }: { + tx: Transaction; tableName: ReturnType[number]; checkpoint: string; - tx: Transaction; }) => { const primaryKeyColumns = getPrimaryKeyColumns( schemaBuild.schema[tableName.js] as PgTable, @@ -432,7 +429,8 @@ export const createDatabase = async ({ }); }; - let checkpoint: string | undefined; + /** 'true' if `migrate` created new tables. */ + let createdTables: boolean; const database = { driver, @@ -839,7 +837,7 @@ export const createDatabase = async ({ .executeTakeFirst() .then((row) => row?.value); - let createdTables = false; + createdTables = false; if (previousApp === undefined) { await createEnums(); @@ -850,7 +848,7 @@ export const createDatabase = async ({ (process.env.PONDER_EXPERIMENTAL_DB === "platform" && previousApp.build_id !== buildId) || (process.env.PONDER_EXPERIMENTAL_DB === "platform" && - previousApp.checkpoint === encodeCheckpoint(zeroCheckpoint)) + previousApp.checkpoint === ZERO_CHECKPOINT_STRING) ) { for (const tableName of getTableNames(schemaBuild.schema)) { await tx.schema @@ -887,14 +885,12 @@ export const createDatabase = async ({ // write metadata - checkpoint = encodeCheckpoint(zeroCheckpoint); - const newApp = { is_locked: 1, is_dev: common.options.command === "dev" ? 
1 : 0, heartbeat_at: Date.now(), build_id: buildId, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, table_names: getTableNames(schemaBuild.schema).map( ({ sql }) => sql, ), @@ -1012,9 +1008,8 @@ export const createDatabase = async ({ }, common.options.databaseHeartbeatInterval); }, async recoverCheckpoint() { - if (checkpoint !== undefined) { - return checkpoint; - } + // new tables are empty + if (createdTables) return ZERO_CHECKPOINT_STRING; return this.wrap( { method: "recoverCheckpoint", includeTraceLogs: true }, @@ -1027,7 +1022,7 @@ export const createDatabase = async ({ .executeTakeFirstOrThrow() .then((row) => row.value); - if (app.checkpoint === encodeCheckpoint(zeroCheckpoint)) { + if (app.checkpoint === ZERO_CHECKPOINT_STRING) { for (const tableName of getTableNames(schemaBuild.schema)) { await sql .raw( @@ -1108,13 +1103,13 @@ RETURNS TRIGGER AS $$ BEGIN IF TG_OP = 'INSERT' THEN INSERT INTO "${namespace}"."${tableName.reorg}" (${columnNames.join(",")}, operation, checkpoint) - VALUES (${columnNames.map((name) => `NEW.${name}`).join(",")}, 0, '${encodeCheckpoint(maxCheckpoint)}'); + VALUES (${columnNames.map((name) => `NEW.${name}`).join(",")}, 0, '${MAX_CHECKPOINT_STRING}'); ELSIF TG_OP = 'UPDATE' THEN INSERT INTO "${namespace}"."${tableName.reorg}" (${columnNames.join(",")}, operation, checkpoint) - VALUES (${columnNames.map((name) => `OLD.${name}`).join(",")}, 1, '${encodeCheckpoint(maxCheckpoint)}'); + VALUES (${columnNames.map((name) => `OLD.${name}`).join(",")}, 1, '${MAX_CHECKPOINT_STRING}'); ELSIF TG_OP = 'DELETE' THEN INSERT INTO "${namespace}"."${tableName.reorg}" (${columnNames.join(",")}, operation, checkpoint) - VALUES (${columnNames.map((name) => `OLD.${name}`).join(",")}, 2, '${encodeCheckpoint(maxCheckpoint)}'); + VALUES (${columnNames.map((name) => `OLD.${name}`).join(",")}, 2, '${MAX_CHECKPOINT_STRING}'); END IF; RETURN NULL; END; @@ -1151,13 +1146,9 @@ $$ LANGUAGE plpgsql await this.wrap({ method: "revert", includeTraceLogs: true }, () => Promise.all( getTableNames(schemaBuild.schema).map((tableName) => - qb.internal.transaction().execute((tx) => - revert({ - tableName, - checkpoint, - tx, - }), - ), + qb.internal + .transaction() + .execute((tx) => revert({ tx, tableName, checkpoint })), ), ), ); @@ -1201,7 +1192,7 @@ $$ LANGUAGE plpgsql await qb.internal .updateTable(tableName.reorg) .set({ checkpoint }) - .where("checkpoint", "=", encodeCheckpoint(maxCheckpoint)) + .where("checkpoint", "=", MAX_CHECKPOINT_STRING) .execute(); }, ), diff --git a/packages/core/src/drizzle/kit/index.ts b/packages/core/src/drizzle/kit/index.ts index 95b130ec7..06820f786 100644 --- a/packages/core/src/drizzle/kit/index.ts +++ b/packages/core/src/drizzle/kit/index.ts @@ -109,9 +109,9 @@ const createReorgTableStatement = (statement: JsonCreateTableStatement) => { generatePgSnapshot( [ pgTable("", { - operation_id: serial("operation_id").notNull().primaryKey(), - operation: integer("operation").notNull(), - checkpoint: varchar("checkpoint", { + operation_id: serial().notNull().primaryKey(), + operation: integer().notNull(), + checkpoint: varchar({ length: 75, }).notNull(), }), diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 6a812769c..7fe8443e5 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -19,10 +19,11 @@ export { import type { Config } from "@/config/index.js"; import type { Prettify } from "./types/utils.js"; -export type ContractConfig = Prettify; -export type NetworkConfig = Prettify; 
-export type BlockConfig = Prettify; export type DatabaseConfig = Prettify; +export type NetworksConfig = Prettify; +export type ContractsConfig = Prettify; +export type AccountsConfig = Prettify; +export type BlocksConfig = Prettify; export { onchainTable, diff --git a/packages/core/src/indexing-store/historical.test.ts b/packages/core/src/indexing-store/historical.test.ts index 98c017da3..65760da6e 100644 --- a/packages/core/src/indexing-store/historical.test.ts +++ b/packages/core/src/indexing-store/historical.test.ts @@ -9,7 +9,6 @@ import { NotNullConstraintError, UniqueConstraintError, } from "@/internal/errors.js"; -import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js"; import { eq } from "drizzle-orm"; import { pgTable } from "drizzle-orm/pg-core"; import { zeroAddress } from "viem"; @@ -33,7 +32,7 @@ test("find", async (context) => { common: context.common, schemaBuild: { schema }, database, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); // empty @@ -73,7 +72,7 @@ test("insert", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); // single @@ -220,7 +219,7 @@ test("update", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); // setup @@ -286,7 +285,7 @@ test("delete", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); // no entry @@ -334,7 +333,7 @@ test("flush", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); // insert @@ -391,7 +390,7 @@ test("sql", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); // setup @@ -469,7 +468,7 @@ test("sql followed by find", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); await indexingStore.sql @@ -502,7 +501,7 @@ test("onchain table", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); // check error @@ -531,7 +530,7 @@ test("missing rows", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); // error @@ -561,7 +560,7 @@ test("notNull", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); // insert @@ -588,7 +587,7 @@ test("notNull", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); let error = await indexingStore @@ -622,7 +621,7 @@ test("default", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); await indexingStore.insert(schema.account).values({ address: zeroAddress }); @@ -650,7 +649,7 @@ test("$default", async (context) => { common: context.common, database, 
schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); await indexingStore.insert(schema.account).values({ address: zeroAddress }); @@ -681,7 +680,7 @@ test("$onUpdateFn", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); // insert @@ -715,7 +714,7 @@ test("array", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); await indexingStore.insert(schema.account).values({ @@ -753,7 +752,7 @@ test("enum", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); await indexingStore.insert(schema.account).values({ @@ -789,7 +788,7 @@ test("json bigint", async (context) => { common: context.common, database, schemaBuild: { schema }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + isDatabaseEmpty: true, }); const error = await indexingStore diff --git a/packages/core/src/indexing-store/historical.ts b/packages/core/src/indexing-store/historical.ts index 1c9f9d24c..0a3e4e66f 100644 --- a/packages/core/src/indexing-store/historical.ts +++ b/packages/core/src/indexing-store/historical.ts @@ -13,7 +13,6 @@ import { UniqueConstraintError, } from "@/internal/errors.js"; import type { SchemaBuild } from "@/internal/types.js"; -import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js"; import { prettyPrint } from "@/utils/print.js"; import { createQueue } from "@ponder/common"; import { @@ -146,12 +145,12 @@ export const createHistoricalIndexingStore = ({ common, schemaBuild: { schema }, database, - initialCheckpoint, + isDatabaseEmpty, }: { common: Common; schemaBuild: Pick; database: Database; - initialCheckpoint: string; + isDatabaseEmpty: boolean; }): IndexingStore<"historical"> => { // Operation queue to make sure all queries are run in order, circumventing race conditions const queue = createQueue Promise>({ @@ -302,7 +301,6 @@ export const createHistoricalIndexingStore = ({ return size; }; - let isDatabaseEmpty = initialCheckpoint === encodeCheckpoint(zeroCheckpoint); /** Estimated number of bytes used by cache. */ let cacheBytes = 0; /** LRU counter. 
*/ diff --git a/packages/core/src/indexing/index.ts b/packages/core/src/indexing/index.ts index eba45c6fa..8262a54f1 100644 --- a/packages/core/src/indexing/index.ts +++ b/packages/core/src/indexing/index.ts @@ -5,7 +5,6 @@ import { processEvents, processSetupEvents, setIndexingStore, - updateTotalSeconds, } from "./service.js"; import type { Context, Service } from "./service.js"; @@ -14,7 +13,6 @@ const methods = { kill, processEvents, processSetupEvents, - updateTotalSeconds, setIndexingStore, }; diff --git a/packages/core/src/indexing/service.test.ts b/packages/core/src/indexing/service.test.ts index 2a480fe0b..962ab1bbd 100644 --- a/packages/core/src/indexing/service.test.ts +++ b/packages/core/src/indexing/service.test.ts @@ -11,9 +11,9 @@ import { getErc20ConfigAndIndexingFunctions } from "@/_test/utils.js"; import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; import { onchainTable } from "@/drizzle/onchain.js"; import type { RawEvent } from "@/internal/types.js"; +import { createRpc } from "@/rpc/index.js"; import { decodeEvents } from "@/sync/events.js"; -import { createSync } from "@/sync/index.js"; -import { encodeCheckpoint, zeroCheckpoint } from "@/utils/checkpoint.js"; +import { ZERO_CHECKPOINT_STRING } from "@/utils/checkpoint.js"; import { promiseWithResolvers } from "@ponder/common"; import { checksumAddress, padHex, parseEther, toHex, zeroAddress } from "viem"; import { encodeEventTopics } from "viem/utils"; @@ -41,7 +41,7 @@ const schema = { account }; const { config, rawIndexingFunctions } = getErc20ConfigAndIndexingFunctions({ address: zeroAddress, }); -const { sources, networks } = await buildConfigAndIndexingFunctions({ +const { sources, chains } = await buildConfigAndIndexingFunctions({ config, rawIndexingFunctions, }); @@ -53,26 +53,15 @@ test("createIndexing()", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingService = create({ common, indexingBuild: { sources, - networks, + chains, indexingFunctions: {}, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -90,33 +79,22 @@ test("processSetupEvents() empty", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingService = create({ common, indexingBuild: { sources, - networks, + chains, indexingFunctions: {}, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); const result = await processSetupEvents(indexingService, { sources, - networks, + chains, }); expect(result).toStrictEqual({ status: "success" }); @@ -131,18 +109,6 @@ test("processSetupEvents()", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingFunctions = { "Erc20:setup": vi.fn(), }; @@ -151,17 +117,18 @@ 
test("processSetupEvents()", async (context) => { common, indexingBuild: { sources, - networks, + chains, indexingFunctions, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); const result = await processSetupEvents(indexingService, { sources, - networks, + chains, }); expect(result).toStrictEqual({ status: "success" }); @@ -194,18 +161,6 @@ test("processEvent()", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingFunctions = { "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": vi.fn(), @@ -216,10 +171,11 @@ test("processEvent()", async (context) => { common, indexingBuild: { sources, - networks, + chains, indexingFunctions, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -238,7 +194,7 @@ test("processEvent()", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: { @@ -298,18 +254,6 @@ test("processEvents killed", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingFunctions = { "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": vi.fn(), @@ -320,10 +264,11 @@ test("processEvents killed", async (context) => { common, indexingBuild: { sources, - networks, + chains, indexingFunctions, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -343,7 +288,7 @@ test("processEvents killed", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: { @@ -375,18 +320,6 @@ test("processEvents eventCount", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingFunctions = { "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": vi.fn(), @@ -396,10 +329,11 @@ test("processEvents eventCount", async (context) => { common, indexingBuild: { sources, - networks, + chains, indexingFunctions, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -418,7 +352,7 @@ test("processEvents eventCount", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: { @@ -448,18 +382,6 @@ test("executeSetup() context.client", async (context) => { { schemaBuild: { schema } }, 
); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingFunctions = { "Erc20:setup": async ({ context }: { context: Context }) => { await context.client.getBalance({ @@ -472,10 +394,11 @@ test("executeSetup() context.client", async (context) => { common, indexingBuild: { sources, - networks, + chains, indexingFunctions, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -487,7 +410,7 @@ test("executeSetup() context.client", async (context) => { const result = await processSetupEvents(indexingService, { sources, - networks, + chains, }); expect(result).toStrictEqual({ status: "success" }); @@ -507,18 +430,6 @@ test("executeSetup() context.db", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingFunctions = { "Erc20:setup": async ({ context }: { context: Context }) => { await context.db @@ -531,10 +442,11 @@ test("executeSetup() context.db", async (context) => { common, indexingBuild: { sources, - networks, + chains, indexingFunctions, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -543,7 +455,7 @@ test("executeSetup() context.db", async (context) => { const result = await processSetupEvents(indexingService, { sources, - networks, + chains, }); expect(result).toStrictEqual({ status: "success" }); @@ -566,18 +478,6 @@ test("executeSetup() metrics", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingService = create({ common, indexingBuild: { @@ -585,16 +485,17 @@ test("executeSetup() metrics", async (context) => { "Erc20:setup": vi.fn(), }, sources, - networks, + chains, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); const result = await processSetupEvents(indexingService, { sources, - networks, + chains, }); expect(result).toStrictEqual({ status: "success" }); @@ -611,18 +512,6 @@ test("executeSetup() error", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingFunctions = { "Erc20:setup": vi.fn(), }; @@ -631,10 +520,11 @@ test("executeSetup() error", async (context) => { common, indexingBuild: { sources, - networks, + chains, indexingFunctions, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -643,7 +533,7 @@ test("executeSetup() error", async (context) => { const result = await processSetupEvents(indexingService, { sources, - networks, + chains, }); expect(result).toStrictEqual({ status: "error", error: expect.any(Error) }); 
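A condensed sketch of the wiring pattern these test updates repeat: the indexing service no longer receives a `sync` instance; it is constructed with per-chain RPCs (via `createRpc`) and the sync store directly, and each chain's client is built from `cachedTransport({ rpc, syncStore })` (see `src/indexing/service.ts` below). Identifiers mirror the diff; the surrounding setup (`common`, `syncStore`, `indexingStore`, `sources`, `chains`, `indexingFunctions`) and the intra-package import paths are assumed from the hunks above.

```ts
import { createRpc } from "@/rpc/index.js";
import { create, setIndexingStore } from "./service.js";

// One RPC per configured chain, in the same order as `chains`.
const rpcs = chains.map((chain) => createRpc({ chain, common }));

const indexingService = create({
  common,
  indexingBuild: { sources, chains, indexingFunctions },
  rpcs,
  syncStore,
});

setIndexingStore(indexingService, indexingStore);
```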
@@ -659,18 +549,6 @@ test("processEvents() context.client", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const clientCall = async ({ context }: { context: Context }) => { await context.client.getBalance({ address: BOB, @@ -685,9 +563,10 @@ test("processEvents() context.client", async (context) => { clientCall, }, sources, - networks, + chains, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -711,7 +590,7 @@ test("processEvents() context.client", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: { @@ -742,18 +621,6 @@ test("processEvents() context.db", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - let i = 0; const dbCall = async ({ context }: { event: any; context: Context }) => { @@ -771,9 +638,10 @@ test("processEvents() context.db", async (context) => { dbCall, }, sources, - networks, + chains, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -794,7 +662,7 @@ test("processEvents() context.db", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: { @@ -826,18 +694,6 @@ test("processEvents() metrics", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingService = create({ common, indexingBuild: { @@ -846,9 +702,10 @@ test("processEvents() metrics", async (context) => { vi.fn(), }, sources, - networks, + chains, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -867,7 +724,7 @@ test("processEvents() metrics", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: { @@ -895,18 +752,6 @@ test("processEvents() error", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingFunctions = { "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": vi.fn(), @@ -916,10 +761,11 @@ test("processEvents() error", async (context) => { common, indexingBuild: { sources, - networks, + chains, indexingFunctions, }, - sync, + rpcs: 
[createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -942,7 +788,7 @@ test("processEvents() error", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: { @@ -977,18 +823,6 @@ test("processEvents() error with missing event object properties", async (contex { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const throwError = async ({ event }: { event: any; context: Context }) => { // biome-ignore lint/performance/noDelete: delete event.transaction; @@ -1005,9 +839,10 @@ test("processEvents() error with missing event object properties", async (contex indexingBuild: { indexingFunctions, sources, - networks, + chains, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -1026,7 +861,7 @@ test("processEvents() error with missing event object properties", async (contex const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: { @@ -1056,18 +891,6 @@ test("execute() error after killed", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const { promise, reject } = promiseWithResolvers(); const indexingFunctions = { "Erc20:Transfer(address indexed from, address indexed to, uint256 amount)": @@ -1078,10 +901,11 @@ test("execute() error after killed", async (context) => { common, indexingBuild: { sources, - networks, + chains, indexingFunctions, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -1100,7 +924,7 @@ test("execute() error after killed", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: { @@ -1129,26 +953,15 @@ test("ponderActions getBalance()", async (context) => { { schemaBuild: { schema } }, ); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingService = create({ common, indexingBuild: { sources, - networks, + chains, indexingFunctions: {}, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -1171,26 +984,15 @@ test("ponderActions getCode()", async (context) => { const { address } = await deployErc20({ sender: ALICE }); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: 
encodeCheckpoint(zeroCheckpoint), - }); - const indexingService = create({ common, indexingBuild: { sources, - networks, + chains, indexingFunctions: {}, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -1219,26 +1021,15 @@ test("ponderActions getStorageAt()", async (context) => { sender: ALICE, }); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingService = create({ common, indexingBuild: { sources, - networks, + chains, indexingFunctions: {}, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -1269,26 +1060,15 @@ test("ponderActions readContract()", async (context) => { sender: ALICE, }); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingService = create({ common, indexingBuild: { sources, - networks, + chains, indexingFunctions: {}, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -1319,26 +1099,15 @@ test("ponderActions readContract() blockNumber", async (context) => { sender: ALICE, }); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingService = create({ common, indexingBuild: { sources, - networks, + chains, indexingFunctions: {}, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); @@ -1371,26 +1140,15 @@ test.skip("ponderActions multicall()", async (context) => { sender: ALICE, }); - const sync = await createSync({ - common, - syncStore, - indexingBuild: { - sources, - networks, - }, - onRealtimeEvent: () => Promise.resolve(), - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), - }); - const indexingService = create({ common, indexingBuild: { sources, - networks, + chains, indexingFunctions: {}, }, - sync, + rpcs: [createRpc({ chain: chains[0]!, common })], + syncStore, }); setIndexingStore(indexingService, indexingStore); diff --git a/packages/core/src/indexing/service.ts b/packages/core/src/indexing/service.ts index eb8234338..6eeec67da 100644 --- a/packages/core/src/indexing/service.ts +++ b/packages/core/src/indexing/service.ts @@ -1,35 +1,37 @@ import type { IndexingStore } from "@/indexing-store/index.js"; import type { Common } from "@/internal/common.js"; import type { + Chain, ContractSource, Event, IndexingBuild, IndexingFunctions, - Network, Schema, SetupEvent, Source, } from "@/internal/types.js"; +import type { RPC } from "@/rpc/index.js"; +import type { SyncStore } from "@/sync-store/index.js"; import { isAddressFactory } from "@/sync/filter.js"; -import type { Sync } from "@/sync/index.js"; +import { cachedTransport } from "@/sync/transport.js"; import type { Db } from "@/types/db.js"; import type { Block, Log, Trace, Transaction } from "@/types/eth.js"; import type { DeepPartial } from "@/types/utils.js"; import { - type 
Checkpoint, + ZERO_CHECKPOINT, decodeCheckpoint, encodeCheckpoint, - zeroCheckpoint, } from "@/utils/checkpoint.js"; import { prettyPrint } from "@/utils/print.js"; import { startClock } from "@/utils/timer.js"; -import type { Abi, Address } from "viem"; +import type { Abi, Address, Chain as ViemChain } from "viem"; import { checksumAddress, createClient } from "viem"; import { addStackTrace } from "./addStackTrace.js"; -import { type ReadOnlyClient, getPonderActions } from "./ponderActions.js"; +import type { ReadOnlyClient } from "./ponderActions.js"; +import { getPonderActions } from "./ponderActions.js"; export type Context = { - network: { chainId: number; name: string }; + chain: ViemChain; client: ReadOnlyClient; db: Db; contracts: Record< @@ -54,7 +56,6 @@ export type Service = { eventCount: { [eventName: string]: number; }; - startCheckpoint: Checkpoint; /** * Reduce memory usage by reserving space for objects ahead of time @@ -68,22 +69,24 @@ export type Service = { }; // static cache - networkByChainId: { [chainId: number]: Network }; + chainById: { [chainId: number]: Chain }; clientByChainId: { [chainId: number]: Context["client"] }; contractsByChainId: { [chainId: number]: Context["contracts"] }; }; export const create = ({ common, - indexingBuild: { sources, networks, indexingFunctions }, - sync, + indexingBuild: { sources, chains, indexingFunctions }, + rpcs, + syncStore, }: { common: Common; indexingBuild: Pick< IndexingBuild, - "sources" | "networks" | "indexingFunctions" + "sources" | "chains" | "indexingFunctions" >; - sync: Sync; + rpcs: RPC[]; + syncStore: SyncStore; }): Service => { const contextState: Service["currentEvent"]["contextState"] = { blockNumber: undefined!, @@ -91,13 +94,10 @@ export const create = ({ const clientByChainId: Service["clientByChainId"] = {}; const contractsByChainId: Service["contractsByChainId"] = {}; - const networkByChainId = networks.reduce( - (acc, cur) => { - acc[cur.chainId] = cur; - return acc; - }, - {}, - ); + const chainById = chains.reduce((acc, cur) => { + acc[cur.chain.id] = cur; + return acc; + }, {}); // build contractsByChainId for (const source of sources) { @@ -139,11 +139,13 @@ export const create = ({ } // build clientByChainId - for (const network of networks) { - const transport = sync.getCachedTransport(network); - clientByChainId[network.chainId] = createClient({ - transport, - chain: network.chain, + for (let i = 0; i < chains.length; i++) { + const chain = chains[i]!; + const rpc = rpcs[i]!; + + clientByChainId[chain.chain.id] = createClient({ + transport: cachedTransport({ rpc, syncStore }), + chain: chain.chain, // @ts-ignore }).extend(getPonderActions(contextState)); } @@ -159,17 +161,16 @@ export const create = ({ indexingFunctions, isKilled: false, eventCount, - startCheckpoint: decodeCheckpoint(sync.getStartCheckpoint()), currentEvent: { contextState, context: { - network: { name: undefined!, chainId: undefined! 
}, + chain: undefined!, contracts: undefined!, client: undefined!, db: undefined!, }, }, - networkByChainId, + chainById, clientByChainId, contractsByChainId, }; @@ -179,10 +180,10 @@ export const processSetupEvents = async ( indexingService: Service, { sources, - networks, + chains, }: { sources: Source[]; - networks: Network[]; + chains: Chain[]; }, ): Promise< | { status: "error"; error: Error } @@ -194,12 +195,12 @@ export const processSetupEvents = async ( const [contractName] = eventName.split(":"); - for (const network of networks) { + for (const chain of chains) { const source = sources.find( (s) => s.type === "contract" && s.name === contractName && - s.filter.chainId === network.chainId, + s.filter.chainId === chain.chain.id, ) as ContractSource | undefined; if (source === undefined) continue; @@ -211,10 +212,10 @@ export const processSetupEvents = async ( const result = await executeSetup(indexingService, { event: { type: "setup", - chainId: network.chainId, + chainId: chain.chain.id, checkpoint: encodeCheckpoint({ - ...zeroCheckpoint, - chainId: BigInt(network.chainId), + ...ZERO_CHECKPOINT, + chainId: BigInt(chain.chain.id), blockNumber: BigInt(source.filter.fromBlock ?? 0), }), @@ -262,40 +263,8 @@ export const processEvents = async ( service: "indexing", msg: `Completed indexing function (event="${event.name}", checkpoint=${event.checkpoint})`, }); - - // periodically update metrics - if (i % 93 === 0) { - updateCompletedEvents(indexingService); - - const eventTimestamp = decodeCheckpoint(event.checkpoint).blockTimestamp; - - indexingService.common.metrics.ponder_indexing_completed_seconds.set( - eventTimestamp - indexingService.startCheckpoint.blockTimestamp, - ); - indexingService.common.metrics.ponder_indexing_completed_timestamp.set( - eventTimestamp, - ); - - // Note: allows for terminal and logs to be updated - await new Promise(setImmediate); - } } - // set completed seconds - if (events.length > 0) { - const lastEventInBatchTimestamp = decodeCheckpoint( - events[events.length - 1]!.checkpoint, - ).blockTimestamp; - - indexingService.common.metrics.ponder_indexing_completed_seconds.set( - lastEventInBatchTimestamp - - indexingService.startCheckpoint.blockTimestamp, - ); - indexingService.common.metrics.ponder_indexing_completed_timestamp.set( - lastEventInBatchTimestamp, - ); - } - // set completed events updateCompletedEvents(indexingService); return { status: "success" }; @@ -322,16 +291,6 @@ export const kill = (indexingService: Service) => { indexingService.isKilled = true; }; -export const updateTotalSeconds = ( - indexingService: Service, - endCheckpoint: Checkpoint, -) => { - indexingService.common.metrics.ponder_indexing_total_seconds.set( - endCheckpoint.blockTimestamp - - indexingService.startCheckpoint.blockTimestamp, - ); -}; - const updateCompletedEvents = (indexingService: Service) => { for (const event of Object.keys(indexingService.eventCount)) { const metricLabel = { @@ -356,7 +315,7 @@ const executeSetup = async ( common, indexingFunctions, currentEvent, - networkByChainId, + chainById, contractsByChainId, clientByChainId, } = indexingService; @@ -365,8 +324,7 @@ const executeSetup = async ( try { // set currentEvent - currentEvent.context.network.chainId = event.chainId; - currentEvent.context.network.name = networkByChainId[event.chainId]!.name; + currentEvent.context.chain = chainById[event.chainId]!.chain; currentEvent.context.client = clientByChainId[event.chainId]!; currentEvent.context.contracts = contractsByChainId[event.chainId]!; 
currentEvent.contextState.blockNumber = event.block; @@ -391,7 +349,7 @@ const executeSetup = async ( const decodedCheckpoint = decodeCheckpoint(event.checkpoint); common.logger.error({ service: "indexing", - msg: `Error while processing '${event.name}' event in '${networkByChainId[event.chainId]!.name}' block ${decodedCheckpoint.blockNumber}`, + msg: `Error while processing '${event.name}' event in '${chainById[event.chainId]!.chain.name}' block ${decodedCheckpoint.blockNumber}`, error, }); @@ -415,7 +373,7 @@ const executeEvent = async ( common, indexingFunctions, currentEvent, - networkByChainId, + chainById, contractsByChainId, clientByChainId, } = indexingService; @@ -424,8 +382,7 @@ const executeEvent = async ( try { // set currentEvent - currentEvent.context.network.chainId = event.chainId; - currentEvent.context.network.name = networkByChainId[event.chainId]!.name; + currentEvent.context.chain = chainById[event.chainId]!.chain; currentEvent.context.client = clientByChainId[event.chainId]!; currentEvent.context.contracts = contractsByChainId[event.chainId]!; currentEvent.contextState.blockNumber = event.event.block.number; @@ -452,7 +409,7 @@ const executeEvent = async ( common.logger.error({ service: "indexing", - msg: `Error while processing '${event.name}' event in '${networkByChainId[event.chainId]!.name}' block ${decodedCheckpoint.blockNumber}`, + msg: `Error while processing '${event.name}' event in '${chainById[event.chainId]!.chain.name}' block ${decodedCheckpoint.blockNumber}`, error, }); diff --git a/packages/core/src/internal/metrics.ts b/packages/core/src/internal/metrics.ts index 32eaa1327..8c5a0fd0c 100644 --- a/packages/core/src/internal/metrics.ts +++ b/packages/core/src/internal/metrics.ts @@ -17,14 +17,16 @@ const httpRequestSizeBytes = [ export class MetricsService { registry: prometheus.Registry; + start_timestamp: number; - ponder_indexing_total_seconds: prometheus.Gauge; - ponder_indexing_completed_seconds: prometheus.Gauge; - ponder_indexing_completed_events: prometheus.Gauge<"event">; + ponder_historical_total_indexing_seconds: prometheus.Gauge<"network">; + ponder_historical_cached_indexing_seconds: prometheus.Gauge<"network">; + ponder_historical_completed_indexing_seconds: prometheus.Gauge<"network">; - ponder_indexing_completed_timestamp: prometheus.Gauge; - ponder_indexing_has_error: prometheus.Gauge; + ponder_indexing_timestamp: prometheus.Gauge<"network">; + ponder_indexing_has_error: prometheus.Gauge<"network">; + ponder_indexing_completed_events: prometheus.Gauge<"event">; ponder_indexing_function_duration: prometheus.Histogram<"event">; ponder_indexing_abi_decoding_duration: prometheus.Histogram; @@ -56,7 +58,6 @@ export class MetricsService { >; ponder_rpc_request_duration: prometheus.Histogram<"network" | "method">; - ponder_rpc_request_lag: prometheus.Histogram<"network" | "method">; ponder_postgres_query_total: prometheus.Counter<"pool">; ponder_postgres_query_queue_size: prometheus.Gauge<"pool"> = null!; @@ -64,15 +65,24 @@ export class MetricsService { constructor() { this.registry = new prometheus.Registry(); + this.start_timestamp = Date.now(); - this.ponder_indexing_total_seconds = new prometheus.Gauge({ - name: "ponder_indexing_total_seconds", + this.ponder_historical_total_indexing_seconds = new prometheus.Gauge({ + name: "ponder_historical_total_indexing_seconds", help: "Total number of seconds that are required", + labelNames: ["network"] as const, + registers: [this.registry], + }); + this.ponder_historical_cached_indexing_seconds = 
new prometheus.Gauge({ + name: "ponder_historical_cached_indexing_seconds", + help: "Number of seconds that have been cached", + labelNames: ["network"] as const, registers: [this.registry], }); - this.ponder_indexing_completed_seconds = new prometheus.Gauge({ - name: "ponder_indexing_completed_seconds", + this.ponder_historical_completed_indexing_seconds = new prometheus.Gauge({ + name: "ponder_historical_completed_indexing_seconds", help: "Number of seconds that have been completed", + labelNames: ["network"] as const, registers: [this.registry], }); this.ponder_indexing_completed_events = new prometheus.Gauge({ @@ -81,9 +91,10 @@ export class MetricsService { labelNames: ["network", "event"] as const, registers: [this.registry], }); - this.ponder_indexing_completed_timestamp = new prometheus.Gauge({ - name: "ponder_indexing_completed_timestamp", + this.ponder_indexing_timestamp = new prometheus.Gauge({ + name: "ponder_indexing_timestamp", help: "Timestamp through which all events have been completed", + labelNames: ["network"] as const, registers: [this.registry], }); this.ponder_indexing_has_error = new prometheus.Gauge({ @@ -218,13 +229,6 @@ export class MetricsService { buckets: httpRequestDurationMs, registers: [this.registry], }); - this.ponder_rpc_request_lag = new prometheus.Histogram({ - name: "ponder_rpc_request_lag", - help: "Time RPC requests spend waiting in the request queue", - labelNames: ["network", "method"] as const, - buckets: databaseQueryDurationMs, - registers: [this.registry], - }); this.ponder_postgres_query_total = new prometheus.Counter({ name: "ponder_postgres_query_total", @@ -245,10 +249,12 @@ export class MetricsService { } resetIndexingMetrics() { - this.ponder_indexing_total_seconds.reset(); - this.ponder_indexing_completed_seconds.reset(); + this.start_timestamp = Date.now(); + this.ponder_historical_total_indexing_seconds.reset(); + this.ponder_historical_cached_indexing_seconds.reset(); + this.ponder_historical_completed_indexing_seconds.reset(); this.ponder_indexing_completed_events.reset(); - this.ponder_indexing_completed_timestamp.reset(); + this.ponder_indexing_timestamp.reset(); this.ponder_indexing_has_error.reset(); this.ponder_indexing_function_duration.reset(); this.ponder_indexing_abi_decoding_duration.reset(); @@ -261,7 +267,6 @@ export class MetricsService { this.ponder_historical_completed_blocks.reset(); this.ponder_realtime_reorg_total.reset(); this.ponder_rpc_request_duration.reset(); - this.ponder_rpc_request_lag.reset(); // Note: These are used by both indexing and API services. this.ponder_database_method_duration.reset(); @@ -389,16 +394,33 @@ export async function getIndexingProgress(metrics: MetricsService) { .values[0]?.value; const hasError = hasErrorMetric === 1; - const totalSeconds = - (await metrics.ponder_indexing_total_seconds.get()).values[0]?.value ?? 0; + const sum = (x: number[]) => x.reduce((a, b) => a + b, 0); + const max = (x: number[]) => x.reduce((a, b) => Math.max(a, b), 0); + + const totalSeconds = await metrics.ponder_historical_total_indexing_seconds + .get() + .then(({ values }) => values.map(({ value }) => value)) + .then(sum); + const cachedSeconds = await metrics.ponder_historical_cached_indexing_seconds + .get() + .then(({ values }) => values.map(({ value }) => value)) + .then(sum); const completedSeconds = - (await metrics.ponder_indexing_completed_seconds.get()).values[0]?.value ?? - 0; - const completedToTimestamp = - (await metrics.ponder_indexing_completed_timestamp.get()).values[0]! - .value ?? 
0; + await metrics.ponder_historical_completed_indexing_seconds + .get() + .then(({ values }) => values.map(({ value }) => value)) + .then(sum); + const timestamp = await metrics.ponder_indexing_timestamp + .get() + .then(({ values }) => values.map(({ value }) => value)) + .then(max); - const progress = totalSeconds === 0 ? 0 : completedSeconds / totalSeconds; + const progress = + timestamp === 0 + ? 0 + : totalSeconds === 0 + ? 1 + : (completedSeconds + cachedSeconds) / totalSeconds; const indexingCompletedEventsMetric = ( await metrics.ponder_indexing_completed_events.get() @@ -433,10 +455,10 @@ export async function getIndexingProgress(metrics: MetricsService) { return { hasError, overall: { - completedSeconds, totalSeconds, + cachedSeconds, + completedSeconds, progress, - completedToTimestamp, totalEvents, }, events, @@ -444,86 +466,25 @@ export async function getIndexingProgress(metrics: MetricsService) { } export async function getAppProgress(metrics: MetricsService): Promise<{ - mode: "historical" | "realtime" | "complete" | undefined; + mode: "historical" | "realtime" | undefined; progress: number; eta: number | undefined; }> { - const sync = await getSyncProgress(metrics); const indexing = await getIndexingProgress(metrics); - const decodingSum = await metrics.ponder_indexing_abi_decoding_duration - .get() - .then( - (m) => - m.values.find( - (v) => v.metricName === "ponder_indexing_abi_decoding_duration_sum", - )?.value, - ); - const getEventsSum = await metrics.ponder_database_method_duration - .get() - .then( - (m) => - m.values.find( - (v) => - v.labels.method === "getEvents" && - v.metricName === "ponder_database_method_duration_sum", - )?.value, - ); - const indexingSum = indexing.events.reduce( - (acc, cur) => acc + cur.averageDuration * cur.count, - 0, - ); - - let maxSync: (typeof sync)[number] | undefined; - for (const networkSync of sync) { - if ( - maxSync === undefined || - maxSync.eta === undefined || - (networkSync.eta && networkSync.eta > maxSync.eta) - ) { - maxSync = networkSync; - } - } const remainingSeconds = - indexing.overall.totalSeconds - indexing.overall.completedSeconds; + indexing.overall.totalSeconds - + (indexing.overall.completedSeconds + indexing.overall.cachedSeconds); + const elapsedSeconds = (Date.now() - metrics.start_timestamp) / 1_000; - const indexingEta = + const eta = indexing.overall.completedSeconds === 0 - ? undefined - : (((decodingSum ?? 0) + (getEventsSum ?? 0) + indexingSum) * - remainingSeconds) / - indexing.overall.completedSeconds; - - const eta = sync.every((n) => n.progress === 1) - ? indexingEta - : maxSync?.eta === undefined && indexingEta === undefined - ? undefined - : maxSync?.eta === undefined && maxSync?.progress !== undefined - ? undefined - : Math.max(maxSync?.eta ?? 0, indexingEta ?? 0); - - // Edge case: If all matched events occurred in the same unix timestamp (second), progress will - // be zero, even though indexing is complete. When this happens, totalEvents will be non-zero. - const indexingProgress = - indexing.overall.progress === 0 && indexing.overall.totalEvents > 0 - ? 1 - : indexing.overall.progress; - - const progress = sync.every((n) => n.progress === 1) - ? indexingProgress - : maxSync?.progress === undefined ? 0 - : maxSync!.progress * indexingProgress; + : (elapsedSeconds / indexing.overall.completedSeconds) * remainingSeconds; return { - mode: sync.some((n) => n.status === "realtime") - ? "realtime" - : sync.every((n) => n.status === "complete") - ? "complete" - : sync.length === 0 - ? 
undefined - : "historical", - progress, + mode: indexing.overall.progress === 1 ? "realtime" : "historical", + progress: indexing.overall.progress, eta, }; } diff --git a/packages/core/src/internal/telemetry.ts b/packages/core/src/internal/telemetry.ts index 68cf502de..5dc192aa9 100644 --- a/packages/core/src/internal/telemetry.ts +++ b/packages/core/src/internal/telemetry.ts @@ -297,7 +297,7 @@ export function buildPayload({ return { database_kind: preBuild?.databaseConfig.kind, contract_count: indexingBuild?.sources.length ?? 0, - network_count: indexingBuild?.networks.length ?? 0, + network_count: indexingBuild?.chains.length ?? 0, table_count, indexing_function_count, }; diff --git a/packages/core/src/internal/types.ts b/packages/core/src/internal/types.ts index 634ee7f4a..48ee21ff9 100644 --- a/packages/core/src/internal/types.ts +++ b/packages/core/src/internal/types.ts @@ -13,7 +13,14 @@ import type { PGliteOptions } from "@/utils/pglite.js"; import type { PGlite } from "@electric-sql/pglite"; import type { Hono } from "hono"; import type { PoolConfig } from "pg"; -import type { Abi, Address, Chain, Hex, LogTopic, Transport } from "viem"; +import type { + Abi, + Address, + Hex, + LogTopic, + Transport, + Chain as ViemChain, +} from "viem"; // Database @@ -266,30 +273,28 @@ export type ContractMetadata = { abiEvents: AbiEvents; abiFunctions: AbiFunctions; name: string; - network: Network; + chain: Chain; }; export type AccountMetadata = { type: "account"; name: string; - network: Network; + chain: Chain; }; export type BlockMetadata = { type: "block"; name: string; - network: Network; + chain: Chain; }; -// Network +// Chain -export type Network = { - name: string; - chainId: number; - chain: Chain; - transport: ReturnType; +export type Chain = { + chain: ViemChain; + rpcUrl: string | string[] | Transport; pollingInterval: number; maxRequestsPerSecond: number; - finalityBlockCount: number; disableCache: boolean; + finalityBlockCount: number; }; // Schema @@ -306,6 +311,8 @@ export type NamespaceBuild = string; export type PreBuild = { /** Database type and configuration */ databaseConfig: DatabaseConfig; + /** Ordering of events */ + mode: "omnichain" | "multichain"; }; export type SchemaBuild = { @@ -319,8 +326,8 @@ export type IndexingBuild = { buildId: string; /** Sources to index. */ sources: Source[]; - /** Networks to index. */ - networks: Network[]; + /** Chains to index. */ + chains: Chain[]; /** Event callbacks for all `sources`. */ indexingFunctions: IndexingFunctions; }; @@ -336,7 +343,7 @@ export type ApiBuild = { // Status -/** Closest-to-tip indexed block per network. */ +/** Closest-to-tip indexed block per chain. 
*/ export type Status = { [network: string]: { block: { number: number; timestamp: number } | null; @@ -344,6 +351,12 @@ export type Status = { }; }; +// Seconds + +export type Seconds = { + [network: string]: { start: number; end: number; cached: number }; +}; + // Events export type RawEvent = { diff --git a/packages/core/src/rpc/index.test.ts b/packages/core/src/rpc/index.test.ts new file mode 100644 index 000000000..73323c114 --- /dev/null +++ b/packages/core/src/rpc/index.test.ts @@ -0,0 +1,15 @@ +import { setupAnvil, setupCommon } from "@/_test/setup.js"; +import { getChain } from "@/_test/utils.js"; +import { beforeEach, expect, test } from "vitest"; +import { createRpc } from "./index.js"; +beforeEach(setupCommon); +beforeEach(setupAnvil); + +test("requests", async ({ common }) => { + const chain = getChain(); + const rpc = createRpc({ chain, common }); + + const chainId = await rpc.request({ method: "eth_chainId" }); + + expect(chainId).toBe("0x1"); +}); diff --git a/packages/core/src/rpc/index.ts b/packages/core/src/rpc/index.ts new file mode 100644 index 000000000..a69ed427c --- /dev/null +++ b/packages/core/src/rpc/index.ts @@ -0,0 +1,243 @@ +import type { Common } from "@/internal/common.js"; +import type { Chain } from "@/internal/types.js"; +import { startClock } from "@/utils/timer.js"; +import { wait } from "@/utils/wait.js"; +import { createQueue } from "@ponder/common"; +import { + http, + type EIP1193Parameters, + HttpRequestError, + type HttpTransport, + JsonRpcVersionUnsupportedError, + MethodNotFoundRpcError, + MethodNotSupportedRpcError, + ParseRpcError, + type PublicRpcSchema, + type WebSocketTransport, + webSocket, +} from "viem"; +import type { DebugRpcSchema } from "../utils/debug.js"; + +type Schema = [...PublicRpcSchema, ...DebugRpcSchema]; + +type RequestReturnType["method"]> = + Extract["ReturnType"]; + +type SubscribeParameters = Parameters< + NonNullable["value"]>["subscribe"] +>[0]; + +type SubscribeReturnType = Awaited< + ReturnType["value"]>["subscribe"]> +>; + +export type RPC = { + request: >( + parameters: TParameters, + ) => Promise>; + subscribe: (params: SubscribeParameters) => Promise; + supports: ( + method: EIP1193Parameters["method"] | "eth_subscribe", + ) => boolean; +}; + +const RETRY_COUNT = 9; +const BASE_DURATION = 125; + +export const createRpc = ({ + common, + chain, +}: { + common: Common; + chain: Omit; +}): RPC => { + let httpIndex = 0; + let wsIndex = 0; + const httpTransports: ReturnType[] = []; + const wsTransports: ReturnType[] = []; + + if (typeof chain.rpcUrl === "string") { + if ( + new URL(chain.rpcUrl).protocol === "http" || + new URL(chain.rpcUrl).protocol === "https" + ) { + httpTransports.push(http(chain.rpcUrl)({ chain: chain.chain })); + } else if ( + new URL(chain.rpcUrl).protocol === "ws" || + new URL(chain.rpcUrl).protocol === "wss" + ) { + wsTransports.push(webSocket(chain.rpcUrl)({ chain: chain.chain })); + } + } else if (Array.isArray(chain.rpcUrl)) { + for (const url of chain.rpcUrl) { + if ( + new URL(url).protocol === "http:" || + new URL(url).protocol === "https:" + ) { + httpTransports.push(http(url)({ chain: chain.chain })); + } else if ( + new URL(url).protocol === "ws:" || + new URL(url).protocol === "wss:" + ) { + wsTransports.push(webSocket(url)({ chain: chain.chain })); + } + } + } + + const requestQueue = createQueue< + Awaited>, + Parameters[0] + >({ + frequency: chain.maxRequestsPerSecond, + concurrency: Math.ceil(chain.maxRequestsPerSecond / 4), + initialStart: true, + browser: false, + // 
@ts-ignore + worker: async (request) => { + for (let i = 0; i <= RETRY_COUNT; i++) { + try { + const stopClock = startClock(); + common.logger.trace({ + service: "rpc", + msg: `Sent ${request.method} request (params=${JSON.stringify(request.params)})`, + }); + + const responsePromise = httpTransports[httpIndex++]!.request(request); + if (httpIndex === httpTransports.length) httpIndex = 0; + const response = await responsePromise; + + common.logger.trace({ + service: "rpc", + msg: `Received ${request.method} response (duration=${stopClock()}, params=${JSON.stringify(request.params)})`, + }); + common.metrics.ponder_rpc_request_duration.observe( + { method: request.method, network: chain.chain.name }, + stopClock(), + ); + + return response as RequestReturnType; + } catch (_error) { + const error = _error as Error; + + // TODO(kyle) log ranges + + if (shouldRetry(error) === false) { + common.logger.warn({ + service: "rpc", + msg: `Failed ${request.method} request`, + }); + throw error; + } + + if (i === RETRY_COUNT) { + common.logger.warn({ + service: "rpc", + msg: `Failed ${request.method} request after ${i + 1} attempts`, + error, + }); + throw error; + } + + const duration = BASE_DURATION * 2 ** i; + common.logger.debug({ + service: "rpc", + msg: `Failed ${request.method} request, retrying after ${duration} milliseconds`, + error, + }); + await wait(duration); + } + } + }, + }); + + return { + // @ts-ignore + request: requestQueue.add, + // @ts-ignore + subscribe: async (request) => { + for (let i = 0; i <= RETRY_COUNT; i++) { + try { + const stopClock = startClock(); + common.logger.trace({ + service: "rpc", + msg: `Sent eth_subscribe request (params=${JSON.stringify(request.params)})`, + }); + + const responsePromise = + wsTransports[wsIndex++]!.value!.subscribe(request); + if (wsIndex === wsTransports.length) wsIndex = 0; + const response = await responsePromise; + + common.logger.trace({ + service: "rpc", + msg: `Received eth_subscribe response (duration=${stopClock()}, params=${JSON.stringify(request.params)})`, + }); + common.metrics.ponder_rpc_request_duration.observe( + { method: "eth_subscribe", network: chain.chain.name }, + stopClock(), + ); + + return response; + } catch (_error) { + const error = _error as Error; + + if (shouldRetry(error) === false) { + common.logger.warn({ + service: "rpc", + msg: "Failed eth_subscribe request", + }); + throw error; + } + + if (i === RETRY_COUNT) { + common.logger.warn({ + service: "rpc", + msg: `Failed eth_subscribe request after ${i + 1} attempts`, + error, + }); + throw error; + } + + const duration = BASE_DURATION * 2 ** i; + common.logger.debug({ + service: "rpc", + msg: `Failed eth_subscribe request, retrying after ${duration} milliseconds`, + error, + }); + await wait(duration); + } + } + }, + supports: (method) => { + if (method === "eth_subscribe" && wsTransports.length === 0) return false; + return true; + }, + }; +}; + +/** + * @link https://github.com/wevm/viem/blob/main/src/utils/buildRequest.ts#L192 + */ +function shouldRetry(error: Error) { + if ("code" in error && typeof error.code === "number") { + // Invalid JSON + if (error.code === ParseRpcError.code) return false; + // Method does not exist + if (error.code === MethodNotFoundRpcError.code) return false; + // Method is not implemented + if (error.code === MethodNotSupportedRpcError.code) return false; + // Version of JSON-RPC protocol is not supported + if (error.code === JsonRpcVersionUnsupportedError.code) return false; + } + if (error instanceof HttpRequestError && 
error.status) { + // Method Not Allowed + if (error.status === 405) return false; + // Not Found + if (error.status === 404) return false; + // Not Implemented + if (error.status === 501) return false; + // HTTP Version Not Supported + if (error.status === 505) return false; + } + return true; +} diff --git a/packages/core/src/sync-historical/index.test.ts b/packages/core/src/sync-historical/index.test.ts index 6b6170a3a..eaf9f62e6 100644 --- a/packages/core/src/sync-historical/index.test.ts +++ b/packages/core/src/sync-historical/index.test.ts @@ -18,13 +18,13 @@ import { import { getAccountsConfigAndIndexingFunctions, getBlocksConfigAndIndexingFunctions, + getChain, getErc20ConfigAndIndexingFunctions, - getNetwork, getPairWithFactoryConfigAndIndexingFunctions, testClient, } from "@/_test/utils.js"; import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; -import { createRequestQueue } from "@/utils/requestQueue.js"; +import { createRpc } from "@/rpc/index.js"; import { encodeFunctionData, encodeFunctionResult, @@ -42,11 +42,8 @@ beforeEach(setupIsolatedDatabase); test("createHistoricalSync()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -58,10 +55,10 @@ test("createHistoricalSync()", async (context) => { const historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources, syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); @@ -73,11 +70,8 @@ test("createHistoricalSync()", async (context) => { test("sync() with log filter", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -97,10 +91,10 @@ test("sync() with log filter", async (context) => { const historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources, syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); @@ -123,11 +117,8 @@ test("sync() with log filter", async (context) => { test("sync() with log filter and transaction receipts", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -148,10 +139,10 @@ test("sync() with log filter and transaction receipts", async (context) => { const historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources, syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); @@ -177,11 +168,8 @@ test("sync() with log filter and transaction receipts", async (context) => { test("sync() with block filter", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); - const network = getNetwork(); - const 
requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -195,10 +183,10 @@ test("sync() with block filter", async (context) => { const historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources, syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); @@ -224,11 +212,8 @@ test("sync() with block filter", async (context) => { test("sync() with log factory", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { address } = await deployFactory({ sender: ALICE }); const { result } = await createPair({ factory: address, sender: ALICE }); @@ -251,10 +236,10 @@ test("sync() with log factory", async (context) => { const historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources, syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); @@ -277,11 +262,8 @@ test("sync() with log factory", async (context) => { test("sync() with trace filter", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -337,16 +319,16 @@ test("sync() with trace filter", async (context) => { } } - return requestQueue.request(request); + return rpc.request(request); }; const historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources: sources.filter(({ filter }) => filter.type === "trace"), syncStore, - requestQueue: { - ...requestQueue, + rpc: { + ...rpc, // @ts-ignore request, }, @@ -375,11 +357,8 @@ test("sync() with trace filter", async (context) => { test("sync() with transaction filter", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); await transferEth({ to: BOB, @@ -399,10 +378,10 @@ test("sync() with transaction filter", async (context) => { const historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources: sources.filter(({ filter }) => filter.type === "transaction"), syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); @@ -436,11 +415,8 @@ test("sync() with transaction filter", async (context) => { test("sync() with transfer filter", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { hash } = await transferEth({ to: BOB, @@ -479,16 +455,16 @@ test("sync() with transfer filter", async (context) => { } } - return requestQueue.request(request); + return 
rpc.request(request); }; const historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources: sources.filter(({ filter }) => filter.type === "transfer"), syncStore, - requestQueue: { - ...requestQueue, + rpc: { + ...rpc, // @ts-ignore request, }, @@ -518,11 +494,8 @@ test("sync() with transfer filter", async (context) => { test("sync() with many filters", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -545,10 +518,10 @@ test("sync() with many filters", async (context) => { const historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources: [...erc20Sources, ...blockSources], syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); @@ -576,11 +549,8 @@ test("sync() with many filters", async (context) => { test("sync() with cache", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -600,10 +570,10 @@ test("sync() with cache", async (context) => { let historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources, syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); @@ -611,14 +581,14 @@ test("sync() with cache", async (context) => { // re-instantiate `historicalSync` to reset the cached intervals - const spy = vi.spyOn(requestQueue, "request"); + const spy = vi.spyOn(rpc, "request"); historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources, syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); @@ -631,9 +601,9 @@ test("sync() with cache", async (context) => { test("sync() with partial cache", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, + const chain = getChain(); + const rpc = createRpc({ + chain, common: context.common, }); @@ -655,10 +625,10 @@ test("sync() with partial cache", async (context) => { let historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources, syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); @@ -666,17 +636,17 @@ test("sync() with partial cache", async (context) => { // re-instantiate `historicalSync` to reset the cached intervals - let spy = vi.spyOn(requestQueue, "request"); + let spy = vi.spyOn(rpc, "request"); // @ts-ignore sources[0]!.filter.address = [sources[0]!.filter.address, zeroAddress]; historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources, syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); @@ -701,14 +671,14 @@ test("sync() with partial cache", async (context) => { // re-instantiate `historicalSync` to reset the cached intervals - spy = vi.spyOn(requestQueue, "request"); + spy = vi.spyOn(rpc, "request"); historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources, 
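
Stepping back to the metrics changes earlier in this diff: the new progress and ETA formulas in getIndexingProgress and getAppProgress are easier to read with concrete numbers. A small worked sketch, using illustrative values rather than anything measured:

    // Values below are illustrative, not taken from a real run.
    const totalSeconds = 1_000;      // sum of ponder_historical_total_indexing_seconds
    const cachedSeconds = 400;       // sum of ponder_historical_cached_indexing_seconds
    const completedSeconds = 300;    // sum of ponder_historical_completed_indexing_seconds
    const timestamp = 1_700_000_000; // max of ponder_indexing_timestamp

    const progress =
      timestamp === 0
        ? 0
        : totalSeconds === 0
          ? 1
          : (completedSeconds + cachedSeconds) / totalSeconds; // 0.7

    // ETA scales elapsed wall-clock time by the ratio of remaining to completed work.
    const elapsedSeconds = 120; // (Date.now() - metrics.start_timestamp) / 1_000
    const remainingSeconds = totalSeconds - (completedSeconds + cachedSeconds); // 300
    const eta =
      completedSeconds === 0
        ? undefined
        : (elapsedSeconds / completedSeconds) * remainingSeconds; // 120 seconds
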
syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); @@ -739,11 +709,8 @@ test("sync() with partial cache", async (context) => { test("syncBlock() with cache", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -766,14 +733,14 @@ test("syncBlock() with cache", async (context) => { const historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources: [...erc20Sources, ...blockSources], syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); - const spy = vi.spyOn(requestQueue, "request"); + const spy = vi.spyOn(rpc, "request"); await historicalSync.sync([1, 2]); @@ -787,11 +754,8 @@ test("syncBlock() with cache", async (context) => { test("syncAddress() handles many addresses", async (context) => { const { cleanup, syncStore, database } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); context.common.options.factoryAddressCountThreshold = 10; @@ -821,10 +785,10 @@ test("syncAddress() handles many addresses", async (context) => { const historicalSync = await createHistoricalSync({ common: context.common, - network, + chain, sources, syncStore, - requestQueue, + rpc, onFatalError: () => {}, }); diff --git a/packages/core/src/sync-historical/index.ts b/packages/core/src/sync-historical/index.ts index 5c0e79dca..267eb7c97 100644 --- a/packages/core/src/sync-historical/index.ts +++ b/packages/core/src/sync-historical/index.ts @@ -1,18 +1,19 @@ import type { Common } from "@/internal/common.js"; import type { BlockFilter, + Chain, Factory, Filter, FilterWithoutBlocks, Fragment, LogFactory, LogFilter, - Network, Source, TraceFilter, TransactionFilter, TransferFilter, } from "@/internal/types.js"; +import type { RPC } from "@/rpc/index.js"; import type { SyncStore } from "@/sync-store/index.js"; import { isAddressFactory, @@ -35,7 +36,6 @@ import { intervalDifference, intervalRange, } from "@/utils/interval.js"; -import type { RequestQueue } from "@/utils/requestQueue.js"; import { _debug_traceBlockByNumber, _eth_getBlockByNumber, @@ -68,8 +68,8 @@ type CreateHistoricalSyncParameters = { common: Common; sources: Source[]; syncStore: SyncStore; - network: Network; - requestQueue: RequestQueue; + chain: Chain; + rpc: RPC; onFatalError: (error: Error) => void; }; @@ -132,7 +132,7 @@ export const createHistoricalSync = async ( Filter, { fragment: Fragment; intervals: Interval[] }[] >; - if (args.network.disableCache) { + if (args.chain.disableCache) { intervalsCache = new Map(); for (const { filter } of args.sources) { intervalsCache.set(filter, []); @@ -225,7 +225,7 @@ export const createHistoricalSync = async ( const logs = await Promise.all( intervals.flatMap((interval) => addressBatches.map((address) => - _eth_getLogs(args.requestQueue, { + _eth_getLogs(args.rpc, { address, topics, fromBlock: interval[0], @@ -252,7 +252,7 @@ export const createHistoricalSync = async ( args.common.logger.debug({ service: "sync", msg: `Caught eth_getLogs error on '${ - args.network.name + args.chain.chain.name }', updating recommended range to ${range}.`, 
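
Both sync paths now lean on the retry behavior baked into createRpc rather than a separate request queue. As a point of reference, the wait schedule implied by its RETRY_COUNT and BASE_DURATION constants works out as follows (a sketch of the arithmetic only):

    const BASE_DURATION = 125; // ms, as defined in rpc/index.ts above
    const RETRY_COUNT = 9;     // the final attempt rethrows instead of waiting

    // 125, 250, 500, 1_000, 2_000, 4_000, 8_000, 16_000, 32_000 ms
    const waits = Array.from({ length: RETRY_COUNT }, (_, i) => BASE_DURATION * 2 ** i);
    const totalWaitMs = waits.reduce((a, b) => a + b, 0); // 63_875 ms, roughly 64 seconds
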
}); @@ -320,7 +320,7 @@ export const createHistoricalSync = async ( if (blockCache.has(number)) { block = await blockCache.get(number)!; } else { - const _block = _eth_getBlockByNumber(args.requestQueue, { + const _block = _eth_getBlockByNumber(args.rpc, { blockNumber: toHex(number), }); blockCache.set(number, _block); @@ -341,7 +341,7 @@ export const createHistoricalSync = async ( if (traceCache.has(block)) { return await traceCache.get(block)!; } else { - const traces = _debug_traceBlockByNumber(args.requestQueue, { + const traces = _debug_traceBlockByNumber(args.rpc, { blockNumber: block, }); traceCache.set(block, traces); @@ -375,7 +375,7 @@ export const createHistoricalSync = async ( args.common.logger.warn({ service: "sync", msg: `Caught eth_getBlockReceipts error on '${ - args.network.name + args.chain.chain.name }', switching to eth_getTransactionReceipt method.`, error, }); @@ -406,7 +406,7 @@ export const createHistoricalSync = async ( if (transactionReceiptsCache.has(transaction)) { return await transactionReceiptsCache.get(transaction)!; } else { - const receipt = _eth_getTransactionReceipt(args.requestQueue, { + const receipt = _eth_getTransactionReceipt(args.rpc, { hash: transaction, }); transactionReceiptsCache.set(transaction, receipt); @@ -418,7 +418,7 @@ export const createHistoricalSync = async ( if (blockReceiptsCache.has(block)) { return await blockReceiptsCache.get(block)!; } else { - const blockReceipts = _eth_getBlockReceipts(args.requestQueue, { + const blockReceipts = _eth_getBlockReceipts(args.rpc, { blockHash: block, }); blockReceiptsCache.set(block, blockReceipts); @@ -440,7 +440,7 @@ export const createHistoricalSync = async ( await args.syncStore.insertLogs({ logs: logs.map((log) => ({ log })), shouldUpdateCheckpoint: false, - chainId: args.network.chainId, + chainId: args.chain.chain.id, }); }; @@ -528,7 +528,7 @@ export const createHistoricalSync = async ( await args.syncStore.insertLogs({ logs: logs.map((log, i) => ({ log, block: blocks[i]! 
})), shouldUpdateCheckpoint: true, - chainId: args.network.chainId, + chainId: args.chain.chain.id, }); if (isKilled) return; @@ -559,7 +559,7 @@ export const createHistoricalSync = async ( await args.syncStore.insertTransactionReceipts({ transactionReceipts, - chainId: args.network.chainId, + chainId: args.chain.chain.id, }); } }; @@ -644,7 +644,7 @@ export const createHistoricalSync = async ( await args.syncStore.insertTransactionReceipts({ transactionReceipts, - chainId: args.network.chainId, + chainId: args.chain.chain.id, }); }; @@ -719,7 +719,7 @@ export const createHistoricalSync = async ( await args.syncStore.insertTraces({ traces, - chainId: args.network.chainId, + chainId: args.chain.chain.id, }); if (isKilled) return; @@ -740,7 +740,7 @@ export const createHistoricalSync = async ( await args.syncStore.insertTransactionReceipts({ transactionReceipts, - chainId: args.network.chainId, + chainId: args.chain.chain.id, }); } }; @@ -849,7 +849,7 @@ export const createHistoricalSync = async ( args.common.logger.error({ service: "sync", - msg: `Fatal error: Unable to sync '${args.network.name}' from ${interval[0]} to ${interval[1]}.`, + msg: `Fatal error: Unable to sync '${args.chain.chain.name}' from ${interval[0]} to ${interval[1]}.`, error, }); @@ -869,7 +869,7 @@ export const createHistoricalSync = async ( const blocks = await Promise.all(blockCache.values()); await Promise.all([ - args.syncStore.insertBlocks({ blocks, chainId: args.network.chainId }), + args.syncStore.insertBlocks({ blocks, chainId: args.chain.chain.id }), args.syncStore.insertTransactions({ transactions: blocks.flatMap((block) => block.transactions @@ -879,7 +879,7 @@ export const createHistoricalSync = async ( block, })), ), - chainId: args.network.chainId, + chainId: args.chain.chain.id, }), ]); @@ -887,10 +887,10 @@ export const createHistoricalSync = async ( // Add corresponding intervals to the sync-store // Note: this should happen after so the database doesn't become corrupted - if (args.network.disableCache === false) { + if (args.chain.disableCache === false) { await args.syncStore.insertIntervals({ intervals: intervalsToSync, - chainId: args.network.chainId, + chainId: args.chain.chain.id, }); } diff --git a/packages/core/src/sync-realtime/index.test.ts b/packages/core/src/sync-realtime/index.test.ts index 002883850..eab7f1a48 100644 --- a/packages/core/src/sync-realtime/index.test.ts +++ b/packages/core/src/sync-realtime/index.test.ts @@ -18,14 +18,14 @@ import { import { getAccountsConfigAndIndexingFunctions, getBlocksConfigAndIndexingFunctions, + getChain, getErc20ConfigAndIndexingFunctions, - getNetwork, getPairWithFactoryConfigAndIndexingFunctions, testClient, } from "@/_test/utils.js"; import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; import type { LogFactory, LogFilter } from "@/internal/types.js"; -import { createRequestQueue } from "@/utils/requestQueue.js"; +import { createRpc } from "@/rpc/index.js"; import { _eth_getBlockByNumber } from "@/utils/rpc.js"; import { encodeFunctionData, @@ -44,11 +44,8 @@ test("createRealtimeSyncService()", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -60,8 +57,8 @@ 
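
The realtime tests below build their chain handle with getChain() and createRpc(). For orientation, a hand-written value satisfying the new internal Chain type from internal/types.ts might look like the following; the URLs and numbers are placeholders, not configuration from this repository:

    import { mainnet } from "viem/chains";
    import type { Chain } from "@/internal/types.js";

    const exampleChain: Chain = {
      chain: mainnet,
      // http(s) and ws(s) URLs are routed to separate transports by createRpc
      rpcUrl: ["https://rpc.example.com", "wss://rpc.example.com"],
      pollingInterval: 1_000,
      maxRequestsPerSecond: 50,
      disableCache: false,
      finalityBlockCount: 2,
    };
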
test("createRealtimeSyncService()", async (context) => { const realtimeSync = createRealtimeSync({ common, - network, - requestQueue, + chain, + rpc, sources, onEvent: vi.fn(), onFatalError: vi.fn(), @@ -76,11 +73,8 @@ test("start() handles block", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -90,7 +84,7 @@ test("start() handles block", async (context) => { rawIndexingFunctions, }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); @@ -98,8 +92,8 @@ test("start() handles block", async (context) => { const realtimeSync = createRealtimeSync({ common, - network, - requestQueue, + chain, + rpc, sources, onEvent: vi.fn(), onFatalError: vi.fn(), @@ -122,11 +116,8 @@ test("start() no-op when receiving same block twice", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -136,7 +127,7 @@ test("start() no-op when receiving same block twice", async (context) => { rawIndexingFunctions, }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); @@ -144,8 +135,8 @@ test("start() no-op when receiving same block twice", async (context) => { const realtimeSync = createRealtimeSync({ common, - network, - requestQueue, + chain, + rpc, sources, onEvent: vi.fn(), onFatalError: vi.fn(), @@ -157,7 +148,7 @@ test("start() no-op when receiving same block twice", async (context) => { }); await queue.onIdle(); - await _eth_getBlockByNumber(requestQueue, { blockNumber: 1 }).then( + await _eth_getBlockByNumber(rpc, { blockNumber: 1 }).then( // @ts-ignore (block) => queue.add({ block }), ); @@ -175,11 +166,8 @@ test("start() gets missing block", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork({ finalityBlockCount: 2 }); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain({ finalityBlockCount: 2 }); + const rpc = createRpc({ common: context.common, chain }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -189,7 +177,7 @@ test("start() gets missing block", async (context) => { rawIndexingFunctions, }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); @@ -197,8 +185,8 @@ test("start() gets missing block", async (context) => { const realtimeSync = createRealtimeSync({ common, - network, - requestQueue, + chain, + rpc, sources, onEvent: vi.fn(), onFatalError: vi.fn(), @@ -222,11 +210,8 @@ test("start() retries on error", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const 
network = getNetwork({ finalityBlockCount: 2 }); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain({ finalityBlockCount: 2 }); + const rpc = createRpc({ common: context.common, chain }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -236,18 +221,18 @@ test("start() retries on error", async (context) => { rawIndexingFunctions, }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); await testClient.mine({ blocks: 1 }); - const requestSpy = vi.spyOn(requestQueue, "request"); + const requestSpy = vi.spyOn(rpc, "request"); const realtimeSync = createRealtimeSync({ common, - network, - requestQueue, + chain, + rpc, sources, onEvent: vi.fn(), onFatalError: vi.fn(), @@ -273,11 +258,8 @@ test("kill()", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork({ finalityBlockCount: 2 }); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain({ finalityBlockCount: 2 }); + const rpc = createRpc({ common: context.common, chain }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -287,7 +269,7 @@ test("kill()", async (context) => { rawIndexingFunctions, }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); @@ -295,8 +277,8 @@ test("kill()", async (context) => { const realtimeSync = createRealtimeSync({ common, - network, - requestQueue, + chain, + rpc, sources, onEvent: vi.fn(), onFatalError: vi.fn(), @@ -318,11 +300,8 @@ test("handleBlock() block event with log", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork({ finalityBlockCount: 2 }); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain({ finalityBlockCount: 2 }); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -346,14 +325,14 @@ test("handleBlock() block event with log", async (context) => { data.push(_data); }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); const realtimeSync = createRealtimeSync({ common, - network, - requestQueue, + chain, + rpc, sources, onEvent, onFatalError: vi.fn(), @@ -394,11 +373,8 @@ test("handleBlock() block event with log factory", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork({ finalityBlockCount: 2 }); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain({ finalityBlockCount: 2 }); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployFactory({ sender: ALICE }); const { result: pair } = await createPair({ @@ -430,14 +406,14 @@ test("handleBlock() block event with log factory", async (context) => { data.push(_data); }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); const realtimeSync = createRealtimeSync({ common, - 
network, - requestQueue, + chain, + rpc, sources, onEvent, onFatalError: vi.fn(), @@ -488,11 +464,8 @@ test("handleBlock() block event with block", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork({ finalityBlockCount: 2 }); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain({ finalityBlockCount: 2 }); + const rpc = createRpc({ common: context.common, chain }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -508,14 +481,14 @@ test("handleBlock() block event with block", async (context) => { data.push(_data); }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); const realtimeSync = createRealtimeSync({ common, - network, - requestQueue, + chain, + rpc, sources, onEvent, onFatalError: vi.fn(), @@ -558,11 +531,8 @@ test("handleBlock() block event with transaction", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork({ finalityBlockCount: 2 }); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain({ finalityBlockCount: 2 }); + const rpc = createRpc({ common: context.common, chain }); await transferEth({ to: BOB, @@ -585,14 +555,14 @@ test("handleBlock() block event with transaction", async (context) => { data.push(_data); }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); const realtimeSync = createRealtimeSync({ common, - network, - requestQueue, + chain, + rpc, sources: sources.filter(({ filter }) => filter.type === "transaction"), onEvent, onFatalError: vi.fn(), @@ -634,11 +604,8 @@ test("handleBlock() block event with transfer", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork({ finalityBlockCount: 2 }); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain({ finalityBlockCount: 2 }); + const rpc = createRpc({ common: context.common, chain }); const { hash } = await transferEth({ to: BOB, @@ -674,7 +641,7 @@ test("handleBlock() block event with transfer", async (context) => { ]); } - return requestQueue.request(request); + return rpc.request(request); }; const data: Extract[] = []; @@ -683,15 +650,15 @@ test("handleBlock() block event with transfer", async (context) => { data.push(_data); }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); const realtimeSync = createRealtimeSync({ common, - network, - requestQueue: { - ...requestQueue, + chain, + rpc: { + ...rpc, // @ts-ignore request, }, @@ -736,11 +703,8 @@ test("handleBlock() block event with trace", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork({ finalityBlockCount: 2 }); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain({ finalityBlockCount: 2 }); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -756,11 +720,11 @@ 
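
The trace test below, like the earlier historical-sync tests, swaps in a custom request handler while keeping the rest of the RPC object intact. The pattern, sketched in one place; the intercepted method name is only an example, and `rpc` is assumed to come from createRpc as above:

    const stubbedRpc = {
      ...rpc,
      // @ts-ignore -- the stub loosens the typed request signature, as in the tests above
      request: async (request) => {
        if (request.method === "debug_traceBlockByHash") return []; // illustrative intercept
        return rpc.request(request); // everything else hits the real transport
      },
    };
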
test("handleBlock() block event with trace", async (context) => { sender: ALICE, }); - const block2 = await _eth_getBlockByNumber(requestQueue, { + const block2 = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); - const block3 = await _eth_getBlockByNumber(requestQueue, { + const block3 = await _eth_getBlockByNumber(rpc, { blockNumber: 3, }); @@ -820,7 +784,7 @@ test("handleBlock() block event with trace", async (context) => { return Promise.resolve([]); } - return requestQueue.request(request); + return rpc.request(request); }; const data: Extract[] = []; @@ -829,15 +793,15 @@ test("handleBlock() block event with trace", async (context) => { data.push(_data); }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); const realtimeSync = createRealtimeSync({ common, - network, - requestQueue: { - ...requestQueue, + chain, + rpc: { + ...rpc, // @ts-ignore request, }, @@ -891,11 +855,8 @@ test("handleBlock() finalize event", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork({ finalityBlockCount: 2 }); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain({ finalityBlockCount: 2 }); + const rpc = createRpc({ common: context.common, chain }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -905,7 +866,7 @@ test("handleBlock() finalize event", async (context) => { rawIndexingFunctions, }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); @@ -917,8 +878,8 @@ test("handleBlock() finalize event", async (context) => { const realtimeSync = createRealtimeSync({ common, - network, - requestQueue, + chain, + rpc, sources, onEvent, onFatalError: vi.fn(), @@ -950,11 +911,8 @@ test("handleReorg() finds common ancestor", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork({ finalityBlockCount: 2 }); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain({ finalityBlockCount: 2 }); + const rpc = createRpc({ common: context.common, chain }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -966,14 +924,14 @@ test("handleReorg() finds common ancestor", async (context) => { const onEvent = vi.fn(); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); const realtimeSync = createRealtimeSync({ common, - network, - requestQueue, + chain, + rpc, sources, onEvent, onFatalError: vi.fn(), @@ -986,7 +944,7 @@ test("handleReorg() finds common ancestor", async (context) => { initialChildAddresses: new Map(), }); - await _eth_getBlockByNumber(requestQueue, { blockNumber: 2 }).then( + await _eth_getBlockByNumber(rpc, { blockNumber: 2 }).then( // @ts-ignore (block) => queue.add({ block }), ); @@ -1009,11 +967,8 @@ test("handleReorg() throws error for deep reorg", async (context) => { const { common } = context; const { cleanup } = await setupDatabaseServices(context); - const network = getNetwork({ finalityBlockCount: 2 }); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain({ finalityBlockCount: 2 }); 
+ const rpc = createRpc({ common: context.common, chain }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -1023,14 +978,14 @@ test("handleReorg() throws error for deep reorg", async (context) => { rawIndexingFunctions, }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { + const finalizedBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); const realtimeSync = createRealtimeSync({ common, - network, - requestQueue, + chain, + rpc, sources, onEvent: vi.fn(), onFatalError: vi.fn(), @@ -1044,7 +999,7 @@ test("handleReorg() throws error for deep reorg", async (context) => { }); await queue.onIdle(); - const block = await _eth_getBlockByNumber(requestQueue, { + const block = await _eth_getBlockByNumber(rpc, { blockNumber: 3, }); diff --git a/packages/core/src/sync-realtime/index.ts b/packages/core/src/sync-realtime/index.ts index 75d1927f4..b405fd7b6 100644 --- a/packages/core/src/sync-realtime/index.ts +++ b/packages/core/src/sync-realtime/index.ts @@ -1,15 +1,16 @@ import type { Common } from "@/internal/common.js"; import type { BlockFilter, + Chain, Factory, Filter, LogFilter, - Network, Source, TraceFilter, TransactionFilter, TransferFilter, } from "@/internal/types.js"; +import type { RPC } from "@/rpc/index.js"; import { getChildAddress, isAddressFactory, @@ -31,7 +32,6 @@ import type { SyncTransactionReceipt, } from "@/types/sync.js"; import { range } from "@/utils/range.js"; -import type { RequestQueue } from "@/utils/requestQueue.js"; import { _debug_traceBlockByHash, _eth_getBlockByHash, @@ -59,8 +59,8 @@ export type RealtimeSync = { type CreateRealtimeSyncParameters = { common: Common; - network: Network; - requestQueue: RequestQueue; + chain: Chain; + rpc: RPC; sources: Source[]; onEvent: (event: RealtimeSyncEvent) => Promise; onFatalError: (error: Error) => void; @@ -117,7 +117,7 @@ export const createRealtimeSync = ( let unfinalizedBlocks: LightBlock[] = []; let queue: Queue number }>; let consecutiveErrors = 0; - let interval: NodeJS.Timeout | undefined; + let cleanup: (() => Promise) | undefined; const factories: Factory[] = []; const logFilters: LogFilter[] = []; @@ -194,7 +194,7 @@ export const createRealtimeSync = ( }: BlockWithEventData & { endClock?: () => number }) => { args.common.logger.debug({ service: "realtime", - msg: `Started syncing '${args.network.name}' block ${hexToNumber(block.number)}`, + msg: `Started syncing '${args.chain.chain.name}' block ${hexToNumber(block.number)}`, }); // Update `unfinalizedChildAddresses` @@ -388,12 +388,12 @@ export const createRealtimeSync = ( const text = _text.filter((t) => t !== undefined).join(" and "); args.common.logger.info({ service: "realtime", - msg: `Synced ${text} from '${args.network.name}' block ${hexToNumber(block.number)}`, + msg: `Synced ${text} from '${args.chain.chain.name}' block ${hexToNumber(block.number)}`, }); } else { args.common.logger.info({ service: "realtime", - msg: `Synced block ${hexToNumber(block.number)} from '${args.network.name}' `, + msg: `Synced block ${hexToNumber(block.number)} from '${args.chain.chain.name}' `, }); } @@ -403,6 +403,7 @@ export const createRealtimeSync = ( // @ts-ignore block.transactions = undefined; + // TODO(kyle) why awaited await args.onEvent({ type: "block", hasMatchedFilter: matchedFilters.size > 0, @@ -421,19 +422,19 @@ export const createRealtimeSync = ( // finalized. 
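
A small worked example of the finality window check computed just below, assuming finalityBlockCount = 2 (all numbers are illustrative):

    const finalityBlockCount = 2;
    const finalizedBlockNumber = 10;
    const newHeadNumber = 14;

    // Finality only advances once the head is two full windows past the finalized block...
    const blockMovesFinality =
      newHeadNumber >= finalizedBlockNumber + 2 * finalityBlockCount; // 14 >= 14, true

    // ...and the block that then becomes finalized trails the head by one window.
    const pendingFinalizedBlockNumber = newHeadNumber - finalityBlockCount; // 12
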
const blockMovesFinality = hexToNumber(block.number) >= - hexToNumber(finalizedBlock.number) + 2 * args.network.finalityBlockCount; + hexToNumber(finalizedBlock.number) + 2 * args.chain.finalityBlockCount; if (blockMovesFinality) { const pendingFinalizedBlock = unfinalizedBlocks.find( (lb) => hexToNumber(lb.number) === - hexToNumber(block.number) - args.network.finalityBlockCount, + hexToNumber(block.number) - args.chain.finalityBlockCount, )!; args.common.logger.debug({ service: "realtime", msg: `Finalized ${hexToNumber(pendingFinalizedBlock.number) - hexToNumber(finalizedBlock.number) + 1} '${ - args.network.name - }' blocks from ${hexToNumber(finalizedBlock.number) + 1} to ${hexToNumber(pendingFinalizedBlock.number)}`, + args.chain.chain.name + }' blocks [${hexToNumber(finalizedBlock.number) + 1}, ${hexToNumber(pendingFinalizedBlock.number)}]`, }); const finalizedBlocks = unfinalizedBlocks.filter( @@ -493,7 +494,7 @@ export const createRealtimeSync = ( args.common.logger.debug({ service: "realtime", - msg: `Finished syncing '${args.network.name}' block ${hexToNumber(block.number)}`, + msg: `Finished syncing '${args.chain.chain.name}' block ${hexToNumber(block.number)}`, }); }; @@ -507,7 +508,7 @@ export const createRealtimeSync = ( const handleReorg = async (block: SyncBlock) => { args.common.logger.warn({ service: "realtime", - msg: `Detected forked '${args.network.name}' block at height ${hexToNumber(block.number)}`, + msg: `Detected forked '${args.chain.chain.name}' block at height ${hexToNumber(block.number)}`, }); // Record blocks that have been removed from the local chain. @@ -531,13 +532,13 @@ export const createRealtimeSync = ( if (unfinalizedBlocks.length === 0) { // No compatible block was found in the local chain, must be a deep reorg. 
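
The branch below is one step of the common-ancestor search: either the local unfinalized chain has been exhausted, which means the reorg reaches past the finalized block and is fatal, or the search follows the remote chain one block further back by parent hash. A condensed sketch of that single step, assuming the surrounding unfinalizedBlocks, finalizedBlock, and rpc state:

    const stepBack = async (remoteBlock: SyncBlock): Promise<SyncBlock> => {
      if (unfinalizedBlocks.length === 0) {
        // No locally tracked block can be the ancestor: a reorg deeper than finality.
        throw new Error(
          `Encountered unrecoverable reorg beyond finalized block ${hexToNumber(finalizedBlock.number)}`,
        );
      }
      return _eth_getBlockByHash(rpc, { hash: remoteBlock.parentHash });
    };
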
- const msg = `Encountered unrecoverable '${args.network.name}' reorg beyond finalized block ${hexToNumber(finalizedBlock.number)}`; + const msg = `Encountered unrecoverable '${args.chain.chain.name}' reorg beyond finalized block ${hexToNumber(finalizedBlock.number)}`; args.common.logger.warn({ service: "realtime", msg }); throw new Error(msg); } else { - remoteBlock = await _eth_getBlockByHash(args.requestQueue, { + remoteBlock = await _eth_getBlockByHash(args.rpc, { hash: remoteBlock.parentHash, }); // Add tip to `reorgedBlocks` @@ -551,9 +552,9 @@ export const createRealtimeSync = ( args.common.logger.warn({ service: "realtime", - msg: `Reconciled ${reorgedBlocks.length}-block reorg on '${ - args.network.name - }' with common ancestor block ${hexToNumber(commonAncestor.number)}`, + msg: `Reconciled ${reorgedBlocks.length}-block '${ + args.chain.chain.name + }' reorg with common ancestor block ${hexToNumber(commonAncestor.number)}`, }); // recompute `unfinalizedChildAddresses` @@ -592,7 +593,7 @@ export const createRealtimeSync = ( if (isBlockReceipts === false) { const transactionReceipts = await Promise.all( Array.from(transactionHashes).map(async (hash) => - _eth_getTransactionReceipt(args.requestQueue, { hash }), + _eth_getTransactionReceipt(args.rpc, { hash }), ), ); @@ -601,7 +602,7 @@ export const createRealtimeSync = ( let blockReceipts: SyncTransactionReceipt[]; try { - blockReceipts = await _eth_getBlockReceipts(args.requestQueue, { + blockReceipts = await _eth_getBlockReceipts(args.rpc, { blockHash, }); } catch (_error) { @@ -609,7 +610,7 @@ export const createRealtimeSync = ( args.common.logger.warn({ service: "realtime", msg: `Caught eth_getBlockReceipts error on '${ - args.network.name + args.chain.chain.name }', switching to eth_getTransactionReceipt method.`, error, }); @@ -657,7 +658,7 @@ export const createRealtimeSync = ( let logs: SyncLog[] = []; if (shouldRequestLogs) { - logs = await _eth_getLogs(args.requestQueue, { blockHash: block.hash }); + logs = await _eth_getLogs(args.rpc, { blockHash: block.hash }); // Protect against RPCs returning empty logs. Known to happen near chain tip. if (block.logsBloom !== zeroLogsBloom && logs.length === 0) { @@ -691,11 +692,11 @@ export const createRealtimeSync = ( if (log.transactionHash === zeroHash) { args.common.logger.warn({ service: "sync", - msg: `Detected log with empty transaction hash in block ${block.hash} at log index ${hexToNumber(log.logIndex)}. This is expected for some networks like ZKsync.`, + msg: `Detected '${args.chain.chain.name}' log with empty transaction hash in block ${block.hash} at log index ${hexToNumber(log.logIndex)}. This is expected for some networks like ZKsync.`, }); } else { throw new Error( - `Detected inconsistent RPC responses. 'log.transactionHash' ${log.transactionHash} not found in 'block.transactions' ${block.hash}`, + `Detected inconsistent '${args.chain.chain.name}' RPC responses. 
'log.transactionHash' ${log.transactionHash} not found in 'block.transactions' ${block.hash}`, ); } } @@ -708,7 +709,7 @@ export const createRealtimeSync = ( ) { args.common.logger.debug({ service: "realtime", - msg: `Skipped fetching logs for '${args.network.name}' block ${hexToNumber(block.number)} due to bloom filter result`, + msg: `Skipped fetching '${args.chain.chain.name}' logs for block ${hexToNumber(block.number)} due to bloom filter result`, }); } @@ -721,7 +722,7 @@ export const createRealtimeSync = ( let traces: SyncTrace[] = []; if (shouldRequestTraces) { - traces = await _debug_traceBlockByHash(args.requestQueue, { + traces = await _debug_traceBlockByHash(args.rpc, { hash: block.hash, }); @@ -782,7 +783,7 @@ export const createRealtimeSync = ( if (log.transactionHash === zeroHash) { args.common.logger.warn({ service: "sync", - msg: `Detected log with empty transaction hash in block ${block.hash} at log index ${hexToNumber(log.logIndex)}. This is expected for some networks like ZKsync.`, + msg: `Detected '${args.chain.chain.name}' log with empty transaction hash in block ${block.hash} at log index ${hexToNumber(log.logIndex)}. This is expected for some networks like ZKsync.`, }); } else { requiredTransactions.add(log.transactionHash); @@ -895,7 +896,7 @@ export const createRealtimeSync = ( }; return { - start(startArgs) { + async start(startArgs) { finalizedBlock = startArgs.syncProgress.finalized; finalizedChildAddresses = startArgs.initialChildAddresses; /** @@ -919,7 +920,7 @@ export const createRealtimeSync = ( if (latestBlock.hash === block.hash) { args.common.logger.trace({ service: "realtime", - msg: `Skipped processing '${args.network.name}' block ${hexToNumber(block.number)}, already synced`, + msg: `Skipped processing '${args.chain.chain.name}' block ${hexToNumber(block.number)}, already synced`, }); return; @@ -951,7 +952,7 @@ export const createRealtimeSync = ( const pendingBlocks = await Promise.all( missingBlockRange.map((blockNumber) => - _eth_getBlockByNumber(args.requestQueue, { + _eth_getBlockByNumber(args.rpc, { blockNumber, }).then((block) => fetchBlockEventData(block)), ), @@ -960,11 +961,11 @@ export const createRealtimeSync = ( args.common.logger.debug({ service: "realtime", msg: `Fetched ${missingBlockRange.length} missing '${ - args.network.name - }' blocks from ${hexToNumber(latestBlock.number) + 1} to ${Math.min( + args.chain.chain.name + }' blocks [${hexToNumber(latestBlock.number) + 1}, ${Math.min( hexToNumber(block.number), hexToNumber(latestBlock.number) + MAX_QUEUED_BLOCKS, - )}`, + )}]`, }); // This is needed to ensure proper `kill()` behavior. When the service @@ -1005,7 +1006,7 @@ export const createRealtimeSync = ( args.common.logger.warn({ service: "realtime", - msg: `Failed to process '${args.network.name}' block ${hexToNumber(block.number)}`, + msg: `Failed to process '${args.chain.chain.name}' block ${hexToNumber(block.number)}`, error, }); @@ -1013,7 +1014,7 @@ export const createRealtimeSync = ( args.common.logger.warn({ service: "realtime", - msg: `Retrying '${args.network.name}' sync after ${duration} ${ + msg: `Retrying '${args.chain.chain.name}' sync after ${duration} ${ duration === 1 ? 
"second" : "seconds" }.`, }); @@ -1028,7 +1029,7 @@ export const createRealtimeSync = ( if (++consecutiveErrors === ERROR_TIMEOUT.length) { args.common.logger.error({ service: "realtime", - msg: `Fatal error: Unable to process '${args.network.name}' block ${hexToNumber(block.number)} after ${ERROR_TIMEOUT.length} attempts.`, + msg: `Fatal error: Unable to process '${args.chain.chain.name}' block ${hexToNumber(block.number)} after ${ERROR_TIMEOUT.length} attempts.`, error, }); @@ -1038,22 +1039,45 @@ export const createRealtimeSync = ( }, }); - const enqueue = async () => { + const enqueue = async (hash?: Hash) => { try { - const block = await _eth_getBlockByNumber(args.requestQueue, { - blockTag: "latest", + let block: SyncBlock; + + args.common.logger.debug({ + service: "realtime", + msg: `Received latest '${args.chain.chain.name}' block ${hexToNumber(block.number)}`, }); const latestBlock = getLatestUnfinalizedBlock(); - // We already saw and handled this block. No-op. - if (latestBlock.hash === block.hash) { - args.common.logger.trace({ - service: "realtime", - msg: `Skipped processing '${args.network.name}' block ${hexToNumber(block.number)}, already synced`, + if (hash === undefined) { + block = await _eth_getBlockByNumber(args.rpc, { + blockTag: "latest", }); - return; + // We already saw and handled this block. No-op. + if (latestBlock.hash === block.hash) { + args.common.logger.trace({ + service: "realtime", + msg: `Skipped processing '${args.chain.chain.name}' block ${hexToNumber(block.number)}, already synced`, + }); + + return; + } + } else { + // We already saw and handled this block. No-op. + if (latestBlock.hash === hash) { + args.common.logger.trace({ + service: "realtime", + msg: `Skipped processing '${args.chain.chain.name}' block ${hexToNumber(latestBlock.number)}, already synced`, + }); + + return; + } + + block = await _eth_getBlockByHash(args.rpc, { + hash, + }); } const endClock = startClock(); @@ -1070,7 +1094,7 @@ export const createRealtimeSync = ( args.common.logger.warn({ service: "realtime", - msg: `Failed to fetch latest '${args.network.name}' block`, + msg: `Failed to fetch latest '${args.chain.chain.name}' block`, error, }); @@ -1078,7 +1102,7 @@ export const createRealtimeSync = ( if (++consecutiveErrors === ERROR_TIMEOUT.length) { args.common.logger.error({ service: "realtime", - msg: `Fatal error: Unable to fetch latest '${args.network.name}' block after ${ERROR_TIMEOUT.length} attempts.`, + msg: `Fatal error: Unable to fetch latest '${args.chain.chain.name}' block after ${ERROR_TIMEOUT.length} attempts.`, error, }); @@ -1087,13 +1111,48 @@ export const createRealtimeSync = ( } }; - interval = setInterval(enqueue, args.network.pollingInterval); + if (args.rpc.supports("eth_subscribe")) { + args.common.logger.info({ + service: "realtime", + msg: `Subscribing to '${args.chain.chain.name}' network via websocket`, + }); + + try { + const connection = await args.rpc.subscribe({ + params: ["newHeads"], + onData: (data) => { + // TODO(kyle) handle data.error + + enqueue(data.result.hash); + }, + onError: () => { + // TODO(kyle) handle error + }, + }); + cleanup = () => connection.unsubscribe().then(() => {}); + } catch { + // TODO(kyle) handle error + args.common.logger.warn({ + service: "realtime", + msg: `Failed subscribing to '${args.chain.chain.name}' network via websocket, defaulting to polling`, + }); + const interval = setInterval(enqueue, args.chain.pollingInterval); + cleanup = () => Promise.resolve(clearInterval(interval)); + } + } else { + 
args.common.logger.info({ + service: "realtime", + msg: `Subscribing to '${args.chain.chain.name}' network via polling`, + }); + const interval = setInterval(enqueue, args.chain.pollingInterval); + cleanup = () => Promise.resolve(clearInterval(interval)); + } // Note: this is done just for testing. return enqueue().then(() => queue); }, async kill() { - clearInterval(interval); + await cleanup?.(); isKilled = true; queue?.pause(); queue?.clear(); diff --git a/packages/core/src/sync-store/encoding.ts b/packages/core/src/sync-store/encoding.ts index 16d83738d..32b634839 100644 --- a/packages/core/src/sync-store/encoding.ts +++ b/packages/core/src/sync-store/encoding.ts @@ -8,9 +8,9 @@ import type { } from "@/types/sync.js"; import { EVENT_TYPES, + MAX_CHECKPOINT, + ZERO_CHECKPOINT, encodeCheckpoint, - maxCheckpoint, - zeroCheckpoint, } from "@/utils/checkpoint.js"; import { toLowerCase } from "@/utils/lowercase.js"; import type { ColumnType, Insertable } from "kysely"; @@ -55,9 +55,9 @@ export const encodeBlock = ({ blockTimestamp: hexToNumber(block.timestamp), chainId: BigInt(chainId), blockNumber: hexToBigInt(block.number), - transactionIndex: maxCheckpoint.transactionIndex, + transactionIndex: MAX_CHECKPOINT.transactionIndex, eventType: EVENT_TYPES.blocks, - eventIndex: zeroCheckpoint.eventIndex, + eventIndex: ZERO_CHECKPOINT.eventIndex, }), baseFeePerGas: block.baseFeePerGas ? hexToBigInt(block.baseFeePerGas) @@ -183,7 +183,7 @@ export const encodeTransaction = ({ blockNumber: hexToBigInt(transaction.blockNumber), transactionIndex: hexToBigInt(transaction.transactionIndex), eventType: EVENT_TYPES.transactions, - eventIndex: zeroCheckpoint.eventIndex, + eventIndex: ZERO_CHECKPOINT.eventIndex, }), chainId, blockHash: transaction.blockHash, diff --git a/packages/core/src/sync-store/index.test.ts b/packages/core/src/sync-store/index.test.ts index 2d78c753d..ffba5f74d 100644 --- a/packages/core/src/sync-store/index.test.ts +++ b/packages/core/src/sync-store/index.test.ts @@ -17,8 +17,8 @@ import { import { getAccountsConfigAndIndexingFunctions, getBlocksConfigAndIndexingFunctions, + getChain, getErc20ConfigAndIndexingFunctions, - getNetwork, getPairWithFactoryConfigAndIndexingFunctions, testClient, } from "@/_test/utils.js"; @@ -29,14 +29,14 @@ import type { LogFactory, LogFilter, } from "@/internal/types.js"; +import { createRpc } from "@/rpc/index.js"; import type { SyncTrace, SyncTransaction } from "@/types/sync.js"; import { + MAX_CHECKPOINT, + MAX_CHECKPOINT_STRING, + ZERO_CHECKPOINT_STRING, decodeCheckpoint, - encodeCheckpoint, - maxCheckpoint, - zeroCheckpoint, } from "@/utils/checkpoint.js"; -import { createRequestQueue } from "@/utils/requestQueue.js"; import { _eth_getBlockByNumber, _eth_getLogs, @@ -490,15 +490,12 @@ test("insertIntervals() preserves fragments", async (context) => { test("getChildAddresses()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployFactory({ sender: ALICE }); const { result } = await createPair({ factory: address, sender: ALICE }); - const rpcLogs = await _eth_getLogs(requestQueue, { + const rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -560,15 +557,12 @@ test("getChildAddresses() empty", async (context) => { test("getChildAddresses() distinct", async 
(context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployFactory({ sender: ALICE }); const { result } = await createPair({ factory: address, sender: ALICE }); - const rpcLogs = await _eth_getLogs(requestQueue, { + const rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -604,15 +598,12 @@ test("getChildAddresses() distinct", async (context) => { test("filterChildAddresses()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployFactory({ sender: ALICE }); const { result } = await createPair({ factory: address, sender: ALICE }); - const rpcLogs = await _eth_getLogs(requestQueue, { + const rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -647,11 +638,8 @@ test("filterChildAddresses()", async (context) => { test("insertLogs()", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -660,7 +648,7 @@ test("insertLogs()", async (context) => { amount: parseEther("1"), sender: ALICE, }); - const rpcLogs = await _eth_getLogs(requestQueue, { + const rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -680,11 +668,8 @@ test("insertLogs()", async (context) => { test("insertLogs() with duplicates", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -693,7 +678,7 @@ test("insertLogs() with duplicates", async (context) => { amount: parseEther("1"), sender: ALICE, }); - const rpcLogs = await _eth_getLogs(requestQueue, { + const rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -719,11 +704,8 @@ test("insertLogs() with duplicates", async (context) => { test("insertLogs() creates checkpoint", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -732,11 +714,11 @@ test("insertLogs() creates checkpoint", async (context) => { amount: parseEther("1"), sender: ALICE, }); - const rpcLogs = await _eth_getLogs(requestQueue, { + const rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, 
}); @@ -762,11 +744,8 @@ test("insertLogs() creates checkpoint", async (context) => { test("insertLogs() upserts checkpoint", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -775,11 +754,11 @@ test("insertLogs() upserts checkpoint", async (context) => { amount: parseEther("1"), sender: ALICE, }); - const rpcLogs = await _eth_getLogs(requestQueue, { + const rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); @@ -816,14 +795,11 @@ test("insertLogs() upserts checkpoint", async (context) => { test("insertBlocks()", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); await testClient.mine({ blocks: 1 }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); @@ -841,14 +817,11 @@ test("insertBlocks()", async (context) => { test("insertBlocks() with duplicates", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); await testClient.mine({ blocks: 1 }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); @@ -867,14 +840,11 @@ test("insertBlocks() with duplicates", async (context) => { test("insertBlocks() creates checkpoint", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); await testClient.mine({ blocks: 1 }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); @@ -889,7 +859,7 @@ test("insertBlocks() creates checkpoint", async (context) => { expect(checkpoint.blockTimestamp).toBe(hexToNumber(rpcBlock.timestamp)); expect(checkpoint.chainId).toBe(1n); expect(checkpoint.blockNumber).toBe(1n); - expect(checkpoint.transactionIndex).toBe(maxCheckpoint.transactionIndex); + expect(checkpoint.transactionIndex).toBe(MAX_CHECKPOINT.transactionIndex); expect(checkpoint.eventType).toBe(5); expect(checkpoint.eventIndex).toBe(0n); @@ -899,14 +869,11 @@ test("insertBlocks() creates checkpoint", async (context) => { test("hasBlock()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); await 
testClient.mine({ blocks: 1 }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); @@ -928,11 +895,8 @@ test("hasBlock()", async (context) => { test("insertTransactions()", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -942,7 +906,7 @@ test("insertTransactions()", async (context) => { sender: ALICE, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertTransactions({ @@ -962,11 +926,8 @@ test("insertTransactions()", async (context) => { test("insertTransactions() with duplicates", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -976,7 +937,7 @@ test("insertTransactions() with duplicates", async (context) => { sender: ALICE, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertTransactions({ @@ -1000,11 +961,8 @@ test("insertTransactions() with duplicates", async (context) => { test("hasTransaction()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); const { hash } = await mintErc20({ @@ -1014,7 +972,7 @@ test("hasTransaction()", async (context) => { sender: ALICE, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertTransactions({ @@ -1038,11 +996,8 @@ test("hasTransaction()", async (context) => { test("insertTransactionReceipts()", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); const { hash } = await mintErc20({ @@ -1052,7 +1007,7 @@ test("insertTransactionReceipts()", async (context) => { sender: ALICE, }); - const rpcTransactionReceipt = await _eth_getTransactionReceipt(requestQueue, { + const rpcTransactionReceipt = await _eth_getTransactionReceipt(rpc, { hash, }); @@ -1073,11 +1028,8 @@ test("insertTransactionReceipts()", async (context) => { test("insertTransactionReceipts() with duplicates", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: 
context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); const { hash } = await mintErc20({ @@ -1087,7 +1039,7 @@ test("insertTransactionReceipts() with duplicates", async (context) => { sender: ALICE, }); - const rpcTransactionReceipt = await _eth_getTransactionReceipt(requestQueue, { + const rpcTransactionReceipt = await _eth_getTransactionReceipt(rpc, { hash, }); @@ -1112,11 +1064,8 @@ test("insertTransactionReceipts() with duplicates", async (context) => { test("hasTransactionReceipt()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); const { hash } = await mintErc20({ @@ -1126,7 +1075,7 @@ test("hasTransactionReceipt()", async (context) => { sender: ALICE, }); - const rpcTransactionReceipt = await _eth_getTransactionReceipt(requestQueue, { + const rpcTransactionReceipt = await _eth_getTransactionReceipt(rpc, { hash, }); @@ -1151,11 +1100,8 @@ test("hasTransactionReceipt()", async (context) => { test("insertTraces()", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); const { hash } = await mintErc20({ @@ -1189,7 +1135,7 @@ test("insertTraces()", async (context) => { transactionHash: hash, } satisfies SyncTrace; - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); @@ -1216,11 +1162,8 @@ test("insertTraces()", async (context) => { test("insertTraces() creates checkpoint", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); const { hash } = await mintErc20({ @@ -1254,7 +1197,7 @@ test("insertTraces() creates checkpoint", async (context) => { transactionHash: hash, } satisfies SyncTrace; - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); @@ -1288,11 +1231,8 @@ test("insertTraces() creates checkpoint", async (context) => { test("insertTraces() with duplicates", async (context) => { const { cleanup, database, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); const { hash } = await mintErc20({ @@ -1326,7 +1266,7 @@ test("insertTraces() with duplicates", async (context) => { transactionHash: hash, } satisfies SyncTrace; - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { 
blockNumber: 1, }); @@ -1363,11 +1303,8 @@ test("insertTraces() with duplicates", async (context) => { test("getEvents() returns events", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -1376,12 +1313,12 @@ test("getEvents() returns events", async (context) => { amount: parseEther("1"), sender: ALICE, }); - const rpcLogs = await _eth_getLogs(requestQueue, { + const rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); @@ -1411,8 +1348,8 @@ test("getEvents() returns events", async (context) => { const { events } = await syncStore.getEvents({ filters: [filter], - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -1424,11 +1361,8 @@ test("getEvents() returns events", async (context) => { test("getEvents() handles log filter logic", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -1450,7 +1384,7 @@ test("getEvents() handles log filter logic", async (context) => { rawIndexingFunctions, }); - let rpcBlock = await _eth_getBlockByNumber(requestQueue, { + let rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -1460,7 +1394,7 @@ test("getEvents() handles log filter logic", async (context) => { chainId: 1, }); - let rpcLogs = await _eth_getLogs(requestQueue, { + let rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -1472,7 +1406,7 @@ test("getEvents() handles log filter logic", async (context) => { // noisy data - rpcBlock = await _eth_getBlockByNumber(requestQueue, { + rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 4, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -1482,7 +1416,7 @@ test("getEvents() handles log filter logic", async (context) => { chainId: 1, }); - rpcLogs = await _eth_getLogs(requestQueue, { + rpcLogs = await _eth_getLogs(rpc, { fromBlock: 4, toBlock: 4, }); @@ -1494,8 +1428,8 @@ test("getEvents() handles log filter logic", async (context) => { const { events } = await syncStore.getEvents({ filters: [sources[0]!.filter], - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -1507,11 +1441,8 @@ test("getEvents() handles log filter logic", async (context) => { test("getEvents() handles log factory", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address: factory } = await deployFactory({ 
sender: ALICE }); const { result: pair } = await createPair({ factory, sender: ALICE }); @@ -1534,7 +1465,7 @@ test("getEvents() handles log factory", async (context) => { // factory - let rpcBlock = await _eth_getBlockByNumber(requestQueue, { + let rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -1544,7 +1475,7 @@ test("getEvents() handles log factory", async (context) => { chainId: 1, }); - let rpcLogs = await _eth_getLogs(requestQueue, { + let rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -1556,7 +1487,7 @@ test("getEvents() handles log factory", async (context) => { // pair - rpcBlock = await _eth_getBlockByNumber(requestQueue, { + rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 3, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -1566,7 +1497,7 @@ test("getEvents() handles log factory", async (context) => { chainId: 1, }); - rpcLogs = await _eth_getLogs(requestQueue, { + rpcLogs = await _eth_getLogs(rpc, { fromBlock: 3, toBlock: 3, }); @@ -1578,8 +1509,8 @@ test("getEvents() handles log factory", async (context) => { const { events } = await syncStore.getEvents({ filters: [sources[0]!.filter], - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -1591,11 +1522,8 @@ test("getEvents() handles log factory", async (context) => { test("getEvents() handles multiple log factories", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address: factory } = await deployFactory({ sender: ALICE }); const { result: pair } = await createPair({ factory, sender: ALICE }); @@ -1618,7 +1546,7 @@ test("getEvents() handles multiple log factories", async (context) => { // factory - let rpcBlock = await _eth_getBlockByNumber(requestQueue, { + let rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -1628,7 +1556,7 @@ test("getEvents() handles multiple log factories", async (context) => { chainId: 1, }); - let rpcLogs = await _eth_getLogs(requestQueue, { + let rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -1640,7 +1568,7 @@ test("getEvents() handles multiple log factories", async (context) => { // pair - rpcBlock = await _eth_getBlockByNumber(requestQueue, { + rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 3, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -1650,7 +1578,7 @@ test("getEvents() handles multiple log factories", async (context) => { chainId: 1, }); - rpcLogs = await _eth_getLogs(requestQueue, { + rpcLogs = await _eth_getLogs(rpc, { fromBlock: 3, toBlock: 3, }); @@ -1670,8 +1598,8 @@ test("getEvents() handles multiple log factories", async (context) => { const { events } = await syncStore.getEvents({ filters: [filter], - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -1683,11 +1611,8 @@ test("getEvents() handles multiple log factories", async (context) => { test("getEvents() handles block filter logic", async (context) => { const { cleanup, syncStore } 
= await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); await testClient.mine({ blocks: 2 }); @@ -1699,20 +1624,20 @@ test("getEvents() handles block filter logic", async (context) => { rawIndexingFunctions, }); - let rpcBlock = await _eth_getBlockByNumber(requestQueue, { + let rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); - rpcBlock = await _eth_getBlockByNumber(requestQueue, { + rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); const { events } = await syncStore.getEvents({ filters: [sources[0]!.filter], - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -1724,11 +1649,8 @@ test("getEvents() handles block filter logic", async (context) => { test("getEvents() handles trace filter logic", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); const { hash } = await mintErc20({ @@ -1747,7 +1669,7 @@ test("getEvents() handles trace filter logic", async (context) => { rawIndexingFunctions, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -1794,8 +1716,8 @@ test("getEvents() handles trace filter logic", async (context) => { const { events } = await syncStore.getEvents({ filters: sources.map((source) => source.filter), - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -1807,11 +1729,8 @@ test("getEvents() handles trace filter logic", async (context) => { test("getEvents() handles transaction filter logic", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { hash } = await transferEth({ to: BOB, @@ -1829,7 +1748,7 @@ test("getEvents() handles transaction filter logic", async (context) => { rawIndexingFunctions, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -1839,7 +1758,7 @@ test("getEvents() handles transaction filter logic", async (context) => { chainId: 1, }); - const rpcReceipt = await _eth_getTransactionReceipt(requestQueue, { hash }); + const rpcReceipt = await _eth_getTransactionReceipt(rpc, { hash }); await syncStore.insertTransactionReceipts({ transactionReceipts: [rpcReceipt], @@ -1848,8 +1767,8 @@ test("getEvents() handles transaction filter logic", async (context) => { const { events } = await syncStore.getEvents({ filters: sources.map((source) => 
source.filter), - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -1861,11 +1780,8 @@ test("getEvents() handles transaction filter logic", async (context) => { test("getEvents() handles transfer filter logic", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { hash } = await transferEth({ to: BOB, @@ -1883,7 +1799,7 @@ test("getEvents() handles transfer filter logic", async (context) => { rawIndexingFunctions, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -1893,7 +1809,7 @@ test("getEvents() handles transfer filter logic", async (context) => { chainId: 1, }); - const rpcReceipt = await _eth_getTransactionReceipt(requestQueue, { hash }); + const rpcReceipt = await _eth_getTransactionReceipt(rpc, { hash }); await syncStore.insertTransactionReceipts({ transactionReceipts: [rpcReceipt], @@ -1929,8 +1845,8 @@ test("getEvents() handles transfer filter logic", async (context) => { const { events } = await syncStore.getEvents({ filters: sources.map((source) => source.filter), - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -1943,11 +1859,8 @@ test("getEvents() handles transfer filter logic", async (context) => { test("getEvents() handles block bounds", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -1965,7 +1878,7 @@ test("getEvents() handles block bounds", async (context) => { rawIndexingFunctions, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -1975,7 +1888,7 @@ test("getEvents() handles block bounds", async (context) => { chainId: 1, }); - const rpcLogs = await _eth_getLogs(requestQueue, { + const rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -1990,8 +1903,8 @@ test("getEvents() handles block bounds", async (context) => { const { events } = await syncStore.getEvents({ filters: [sources[0]!.filter], - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -2003,11 +1916,8 @@ test("getEvents() handles block bounds", async (context) => { test("getEvents() pagination", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); await testClient.mine({ blocks: 2 }); @@ -2019,20 +1929,20 @@ test("getEvents() pagination", async 
(context) => { rawIndexingFunctions, }); - let rpcBlock = await _eth_getBlockByNumber(requestQueue, { + let rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); - rpcBlock = await _eth_getBlockByNumber(requestQueue, { + rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); const { events, cursor } = await syncStore.getEvents({ filters: [sources[0]!.filter], - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 1, }); @@ -2041,7 +1951,7 @@ test("getEvents() pagination", async (context) => { const { events: events2 } = await syncStore.getEvents({ filters: [sources[0]!.filter], from: cursor, - to: encodeCheckpoint(maxCheckpoint), + to: MAX_CHECKPOINT_STRING, limit: 1, }); @@ -2096,11 +2006,8 @@ test("pruneRpcRequestResult", async (context) => { test("pruneByChain deletes blocks, logs, traces, transactions", async (context) => { const { syncStore, database, cleanup } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); const { hash: hash1 } = await mintErc20({ @@ -2118,7 +2025,7 @@ test("pruneByChain deletes blocks, logs, traces, transactions", async (context) // block 2 (first mint) - let rpcBlock = await _eth_getBlockByNumber(requestQueue, { + let rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -2128,7 +2035,7 @@ test("pruneByChain deletes blocks, logs, traces, transactions", async (context) chainId: 1, }); - let rpcLogs = await _eth_getLogs(requestQueue, { + let rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -2138,7 +2045,7 @@ test("pruneByChain deletes blocks, logs, traces, transactions", async (context) chainId: 1, }); - let rpcTransactionReceipt = await _eth_getTransactionReceipt(requestQueue, { + let rpcTransactionReceipt = await _eth_getTransactionReceipt(rpc, { hash: hash1, }); @@ -2184,7 +2091,7 @@ test("pruneByChain deletes blocks, logs, traces, transactions", async (context) // block 3 (second mint) - rpcBlock = await _eth_getBlockByNumber(requestQueue, { + rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 3, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -2194,7 +2101,7 @@ test("pruneByChain deletes blocks, logs, traces, transactions", async (context) chainId: 1, }); - rpcLogs = await _eth_getLogs(requestQueue, { + rpcLogs = await _eth_getLogs(rpc, { fromBlock: 3, toBlock: 3, }); @@ -2204,7 +2111,7 @@ test("pruneByChain deletes blocks, logs, traces, transactions", async (context) chainId: 1, }); - rpcTransactionReceipt = await _eth_getTransactionReceipt(requestQueue, { + rpcTransactionReceipt = await _eth_getTransactionReceipt(rpc, { hash: hash1, }); @@ -2226,7 +2133,7 @@ test("pruneByChain deletes blocks, logs, traces, transactions", async (context) chainId: 1, }); - await syncStore.pruneByChain({ chainId: 1, fromBlock: 3 }); + await syncStore.pruneByChain({ chainId: 1 }); const logs = await database.qb.sync.selectFrom("logs").selectAll().execute(); const blocks = await database.qb.sync @@ -2246,11 +2153,11 @@ test("pruneByChain deletes blocks, 
logs, traces, transactions", async (context) .selectAll() .execute(); - expect(logs).toHaveLength(1); - expect(blocks).toHaveLength(1); - expect(traces).toHaveLength(1); - expect(transactions).toHaveLength(1); - expect(transactionReceipts).toHaveLength(1); + expect(logs).toHaveLength(0); + expect(blocks).toHaveLength(0); + expect(traces).toHaveLength(0); + expect(transactions).toHaveLength(0); + expect(transactionReceipts).toHaveLength(0); await cleanup(); }); diff --git a/packages/core/src/sync-store/index.ts b/packages/core/src/sync-store/index.ts index ac59eea44..159e65217 100644 --- a/packages/core/src/sync-store/index.ts +++ b/packages/core/src/sync-store/index.ts @@ -118,10 +118,7 @@ export type SyncStore = { blocks: Pick[]; chainId: number; }): Promise; - pruneByChain(args: { - fromBlock: number; - chainId: number; - }): Promise; + pruneByChain(args: { chainId: number }): Promise; }; const logFactorySQL = ( @@ -902,38 +899,23 @@ export const createSyncStore = ({ .execute(); }, ), - pruneByChain: async ({ fromBlock, chainId }) => + pruneByChain: async ({ chainId }) => database.wrap({ method: "pruneByChain", includeTraceLogs: true }, () => database.qb.sync.transaction().execute(async (tx) => { - await tx - .deleteFrom("logs") - .where("chainId", "=", chainId) - .where("blockNumber", ">=", fromBlock.toString()) - .execute(); - await tx - .deleteFrom("blocks") - .where("chainId", "=", chainId) - .where("number", ">=", fromBlock.toString()) - .execute(); + await tx.deleteFrom("logs").where("chainId", "=", chainId).execute(); + await tx.deleteFrom("blocks").where("chainId", "=", chainId).execute(); await tx .deleteFrom("rpc_request_results") .where("chain_id", "=", chainId) - .where("block_number", ">=", fromBlock.toString()) - .execute(); - await tx - .deleteFrom("traces") - .where("chainId", "=", chainId) - .where("blockNumber", ">=", fromBlock.toString()) .execute(); + await tx.deleteFrom("traces").where("chainId", "=", chainId).execute(); await tx .deleteFrom("transactions") .where("chainId", "=", chainId) - .where("blockNumber", ">=", fromBlock.toString()) .execute(); await tx .deleteFrom("transactionReceipts") .where("chainId", "=", chainId) - .where("blockNumber", ">=", fromBlock.toString()) .execute(); }), ), diff --git a/packages/core/src/sync/events.test.ts b/packages/core/src/sync/events.test.ts index 6cb9c2abd..c64d9aee9 100644 --- a/packages/core/src/sync/events.test.ts +++ b/packages/core/src/sync/events.test.ts @@ -17,8 +17,8 @@ import { import { getAccountsConfigAndIndexingFunctions, getBlocksConfigAndIndexingFunctions, + getChain, getErc20ConfigAndIndexingFunctions, - getNetwork, getPairWithFactoryConfigAndIndexingFunctions, } from "@/_test/utils.js"; import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; @@ -30,13 +30,12 @@ import type { TransferEvent, } from "@/internal/types.js"; import type { LogFactory, LogFilter } from "@/internal/types.js"; +import { createRpc } from "@/rpc/index.js"; import type { SyncTrace, SyncTransaction } from "@/types/sync.js"; import { - encodeCheckpoint, - maxCheckpoint, - zeroCheckpoint, + MAX_CHECKPOINT_STRING, + ZERO_CHECKPOINT_STRING, } from "@/utils/checkpoint.js"; -import { createRequestQueue } from "@/utils/requestQueue.js"; import { _eth_getBlockByNumber, _eth_getLogs, @@ -88,7 +87,7 @@ test("decodeEvents() log", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], 
transaction: {} as RawEvent["transaction"], log: { @@ -135,7 +134,7 @@ test("decodeEvents() log error", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: { @@ -164,7 +163,7 @@ test("decodeEvents() block", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: { number: 1n, } as RawEvent["block"], @@ -196,7 +195,7 @@ test("decodeEvents() transfer", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 3, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: undefined, @@ -242,7 +241,7 @@ test("decodeEvents() transaction", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 0, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: undefined, @@ -271,7 +270,7 @@ test("decodeEvents() trace", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 1, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: undefined, @@ -321,7 +320,7 @@ test("decodeEvents() trace error", async (context) => { const rawEvent = { chainId: 1, sourceIndex: 1, - checkpoint: encodeCheckpoint(zeroCheckpoint), + checkpoint: ZERO_CHECKPOINT_STRING, block: {} as RawEvent["block"], transaction: {} as RawEvent["transaction"], log: undefined, @@ -352,11 +351,8 @@ test("decodeEvents() trace error", async (context) => { test("buildEvents() matches getEvents() log", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -376,7 +372,7 @@ test("buildEvents() matches getEvents() log", async (context) => { // insert block 2 - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -386,7 +382,7 @@ test("buildEvents() matches getEvents() log", async (context) => { chainId: 1, }); - const rpcLogs = await _eth_getLogs(requestQueue, { + const rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -398,8 +394,8 @@ test("buildEvents() matches getEvents() log", async (context) => { const { events: events1 } = await syncStore.getEvents({ filters: sources.map((s) => s.filter), - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -427,11 +423,8 @@ test("buildEvents() matches getEvents() log", async (context) => { test("buildEvents() matches getEvents() log factory", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = 
createRpc({ common: context.common, chain }); const { address } = await deployFactory({ sender: ALICE }); const { result: pair } = await createPair({ @@ -457,7 +450,7 @@ test("buildEvents() matches getEvents() log factory", async (context) => { // insert block 2 - let rpcLogs = await _eth_getLogs(requestQueue, { + let rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -469,7 +462,7 @@ test("buildEvents() matches getEvents() log factory", async (context) => { // insert block 3 - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 3, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -479,7 +472,7 @@ test("buildEvents() matches getEvents() log factory", async (context) => { chainId: 1, }); - rpcLogs = await _eth_getLogs(requestQueue, { + rpcLogs = await _eth_getLogs(rpc, { fromBlock: 3, toBlock: 3, }); @@ -491,8 +484,8 @@ test("buildEvents() matches getEvents() log factory", async (context) => { const { events: events1 } = await syncStore.getEvents({ filters: sources.map((s) => s.filter), - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -522,11 +515,8 @@ test("buildEvents() matches getEvents() log factory", async (context) => { test("buildEvents() matches getEvents() block", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -538,15 +528,15 @@ test("buildEvents() matches getEvents() block", async (context) => { // insert block 0 - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); const { events: events1 } = await syncStore.getEvents({ filters: sources.map((s) => s.filter), - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -574,11 +564,8 @@ test("buildEvents() matches getEvents() block", async (context) => { test("buildEvents() matches getEvents() transfer", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { hash } = await transferEth({ to: BOB, @@ -595,7 +582,7 @@ test("buildEvents() matches getEvents() transfer", async (context) => { rawIndexingFunctions, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -605,7 +592,7 @@ test("buildEvents() matches getEvents() transfer", async (context) => { chainId: 1, }); - const rpcReceipt = await _eth_getTransactionReceipt(requestQueue, { hash }); + const rpcReceipt = await _eth_getTransactionReceipt(rpc, { hash }); await syncStore.insertTransactionReceipts({ transactionReceipts: [rpcReceipt], @@ -641,8 +628,8 @@ test("buildEvents() 
matches getEvents() transfer", async (context) => { const { events: events1 } = await syncStore.getEvents({ filters: sources.map((s) => s.filter), - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -671,11 +658,8 @@ test("buildEvents() matches getEvents() transfer", async (context) => { test("buildEvents() matches getEvents() transaction", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { hash } = await transferEth({ to: BOB, @@ -693,7 +677,7 @@ test("buildEvents() matches getEvents() transaction", async (context) => { rawIndexingFunctions, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -703,7 +687,7 @@ test("buildEvents() matches getEvents() transaction", async (context) => { chainId: 1, }); - const rpcReceipt = await _eth_getTransactionReceipt(requestQueue, { hash }); + const rpcReceipt = await _eth_getTransactionReceipt(rpc, { hash }); await syncStore.insertTransactionReceipts({ transactionReceipts: [rpcReceipt], @@ -712,8 +696,8 @@ test("buildEvents() matches getEvents() transaction", async (context) => { const { events: events1 } = await syncStore.getEvents({ filters: sources.map((s) => s.filter), - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); @@ -741,11 +725,8 @@ test("buildEvents() matches getEvents() transaction", async (context) => { test("buildEvents() matches getEvents() trace", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); const { hash } = await mintErc20({ @@ -764,7 +745,7 @@ test("buildEvents() matches getEvents() trace", async (context) => { rawIndexingFunctions, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); await syncStore.insertBlocks({ blocks: [rpcBlock], chainId: 1 }); @@ -811,8 +792,8 @@ test("buildEvents() matches getEvents() trace", async (context) => { const { events: events1 } = await syncStore.getEvents({ filters: sources.map((s) => s.filter), - from: encodeCheckpoint(zeroCheckpoint), - to: encodeCheckpoint(maxCheckpoint), + from: ZERO_CHECKPOINT_STRING, + to: MAX_CHECKPOINT_STRING, limit: 10, }); diff --git a/packages/core/src/sync/events.ts b/packages/core/src/sync/events.ts index 4ebb5e8ed..800725c65 100644 --- a/packages/core/src/sync/events.ts +++ b/packages/core/src/sync/events.ts @@ -23,9 +23,9 @@ import type { } from "@/types/sync.js"; import { EVENT_TYPES, + MAX_CHECKPOINT, + ZERO_CHECKPOINT, encodeCheckpoint, - maxCheckpoint, - zeroCheckpoint, } from "@/utils/checkpoint.js"; import { never } from "@/utils/never.js"; import { startClock } from "@/utils/timer.js"; @@ -329,9 +329,9 @@ export const buildEvents = ({ blockTimestamp: 
hexToNumber(block.timestamp), chainId: BigInt(filter.chainId), blockNumber: hexToBigInt(block.number), - transactionIndex: maxCheckpoint.transactionIndex, + transactionIndex: MAX_CHECKPOINT.transactionIndex, eventType: EVENT_TYPES.blocks, - eventIndex: zeroCheckpoint.eventIndex, + eventIndex: ZERO_CHECKPOINT.eventIndex, }), block: convertBlock(block), log: undefined, diff --git a/packages/core/src/sync/filter.test.ts b/packages/core/src/sync/filter.test.ts index 2b29e7fe9..499a24a2e 100644 --- a/packages/core/src/sync/filter.test.ts +++ b/packages/core/src/sync/filter.test.ts @@ -12,8 +12,8 @@ import { import { getAccountsConfigAndIndexingFunctions, getBlocksConfigAndIndexingFunctions, + getChain, getErc20ConfigAndIndexingFunctions, - getNetwork, getPairWithFactoryConfigAndIndexingFunctions, } from "@/_test/utils.js"; import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; @@ -25,8 +25,8 @@ import type { TransactionFilter, TransferFilter, } from "@/internal/types.js"; +import { createRpc } from "@/rpc/index.js"; import type { SyncLog, SyncTrace } from "@/types/sync.js"; -import { createRequestQueue } from "@/utils/requestQueue.js"; import { _eth_getBlockByNumber, _eth_getLogs } from "@/utils/rpc.js"; import { type Address, @@ -82,11 +82,8 @@ test("getChildAddress() offset", () => { }); test("isLogFactoryMatched()", async (context) => { - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployFactory({ sender: ALICE }); await createPair({ @@ -106,7 +103,7 @@ test("isLogFactoryMatched()", async (context) => { const filter = sources[0]!.filter as LogFilter; - const rpcLogs = await _eth_getLogs(requestQueue, { + const rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); @@ -135,11 +132,8 @@ test("isLogFactoryMatched()", async (context) => { }); test("isLogFilterMatched()", async (context) => { - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -160,12 +154,12 @@ test("isLogFilterMatched()", async (context) => { const filter = sources[0]!.filter as LogFilter; - const rpcLogs = await _eth_getLogs(requestQueue, { + const rpcLogs = await _eth_getLogs(rpc, { fromBlock: 2, toBlock: 2, }); - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 2, }); @@ -196,11 +190,8 @@ test("isLogFilterMatched()", async (context) => { }); test("isBlockFilterMatched", async (context) => { - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -213,7 +204,7 @@ test("isBlockFilterMatched", async (context) => { const filter = sources[0]!.filter as BlockFilter; - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 0, }); @@ -234,11 +225,8 @@ test("isBlockFilterMatched", async (context) => { }); test("isTransactionFilterMatched()", async (context) => { - const network = 
getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); await transferEth({ to: BOB, @@ -259,7 +247,7 @@ test("isTransactionFilterMatched()", async (context) => { // transaction:from const filter = sources[1]!.filter as TransactionFilter; - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); @@ -281,11 +269,8 @@ test("isTransactionFilterMatched()", async (context) => { }); test("isTransferFilterMatched()", async (context) => { - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { hash } = await transferEth({ to: BOB, @@ -306,7 +291,7 @@ test("isTransferFilterMatched()", async (context) => { // transfer:from const filter = sources[3]!.filter as TransferFilter; - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 1, }); @@ -344,11 +329,8 @@ test("isTransferFilterMatched()", async (context) => { }); test("isTraceFilterMatched()", async (context) => { - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -400,7 +382,7 @@ test("isTraceFilterMatched()", async (context) => { transactionHash: hash, } satisfies SyncTrace; - const rpcBlock = await _eth_getBlockByNumber(requestQueue, { + const rpcBlock = await _eth_getBlockByNumber(rpc, { blockNumber: 3, }); diff --git a/packages/core/src/sync/filter.ts b/packages/core/src/sync/filter.ts index dc40f84ab..6e2189cab 100644 --- a/packages/core/src/sync/filter.ts +++ b/packages/core/src/sync/filter.ts @@ -484,6 +484,7 @@ export const defaultTransactionReceiptInclude: `transactionReceipt.${keyof Trans const defaultTraceInclude: `trace.${keyof UserTrace}`[] = [ "trace.id", + "trace.traceIndex", "trace.type", "trace.from", "trace.to", diff --git a/packages/core/src/sync/index.test.ts b/packages/core/src/sync/index.test.ts index 6874b3ade..4422f4f48 100644 --- a/packages/core/src/sync/index.test.ts +++ b/packages/core/src/sync/index.test.ts @@ -1,47 +1,188 @@ import { - setupAnvil, setupCommon, setupDatabaseServices, setupIsolatedDatabase, } from "@/_test/setup.js"; +import { setupAnvil } from "@/_test/setup.js"; import { getBlocksConfigAndIndexingFunctions, - getNetwork, + getChain, testClient, } from "@/_test/utils.js"; import { buildConfigAndIndexingFunctions } from "@/build/configAndIndexingFunctions.js"; -import type { RawEvent } from "@/internal/types.js"; +import type { + BlockFilter, + Filter, + Fragment, + RawEvent, +} from "@/internal/types.js"; +import { createRpc } from "@/rpc/index.js"; +import { createHistoricalSync } from "@/sync-historical/index.js"; +import { createRealtimeSync } from "@/sync-realtime/index.js"; import { + MAX_CHECKPOINT_STRING, + ZERO_CHECKPOINT_STRING, decodeCheckpoint, - encodeCheckpoint, - maxCheckpoint, - zeroCheckpoint, } from "@/utils/checkpoint.js"; -import { wait } from "@/utils/wait.js"; +import { drainAsyncGenerator } from "@/utils/generators.js"; +import type { Interval } from "@/utils/interval.js"; +import { _eth_getBlockByNumber } from 
"@/utils/rpc.js"; import { promiseWithResolvers } from "@ponder/common"; import { beforeEach, expect, test, vi } from "vitest"; -import { type Sync, createSync } from "./index.js"; +import { getFragments } from "./fragments.js"; +import { + createSync, + getCachedBlock, + getChainCheckpoint, + getLocalEventGenerator, + getLocalSyncGenerator, + getLocalSyncProgress, + getPerChainOnRealtimeSyncEvent, + mergeAsyncGeneratorsWithEventOrder, + splitEvents, +} from "./index.js"; beforeEach(setupCommon); beforeEach(setupAnvil); beforeEach(setupIsolatedDatabase); -async function drainAsyncGenerator( - asyncGenerator: ReturnType, -) { - const result: RawEvent[] = []; +test("splitEvents()", async () => { + const events = [ + { + chainId: 1, + checkpoint: "0", + block: { + hash: "0x1", + timestamp: 1, + number: 1n, + }, + sourceIndex: 0, + }, + { + chainId: 1, + checkpoint: "0", + block: { + hash: "0x2", + timestamp: 2, + number: 2n, + }, + sourceIndex: 0, + }, + ] as unknown as RawEvent[]; + + const result = splitEvents(events); + + expect(result).toMatchInlineSnapshot(` + [ + { + "checkpoint": "000000000100000000000000010000000000000001999999999999999999999999999999999", + "events": [ + { + "block": { + "hash": "0x1", + "number": 1n, + "timestamp": 1, + }, + "chainId": 1, + "checkpoint": "0", + "sourceIndex": 0, + }, + ], + }, + { + "checkpoint": "000000000200000000000000010000000000000002999999999999999999999999999999999", + "events": [ + { + "block": { + "hash": "0x2", + "number": 2n, + "timestamp": 2, + }, + "chainId": 1, + "checkpoint": "0", + "sourceIndex": 0, + }, + ], + }, + ] + `); +}); + +test("getPerChainOnRealtimeSyncEvent() handles block", async (context) => { + const { cleanup, syncStore } = await setupDatabaseServices(context); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); - for await (const { events } of asyncGenerator) { - result.push(...events); + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + }); + + const intervalsCache = new Map< + Filter, + { fragment: Fragment; intervals: Interval[] }[] + >(); + for (const source of sources) { + for (const { fragment } of getFragments(source.filter)) { + intervalsCache.set(source.filter, [{ fragment, intervals: [] }]); + } } - return result; -} + await testClient.mine({ blocks: 1 }); + + const syncProgress = await getLocalSyncProgress({ + common: context.common, + sources, + chain, + rpc, + intervalsCache, + }); -test("createSync()", async (context) => { - const { cleanup, syncStore } = await setupDatabaseServices(context); + const realtimeSync = createRealtimeSync({ + common: context.common, + chain, + sources, + rpc, + onEvent: async () => {}, + onFatalError: () => {}, + }); - const network = getNetwork(); + const onRealtimeSyncEvent = getPerChainOnRealtimeSyncEvent({ + common: context.common, + chain, + sources, + syncStore, + syncProgress, + realtimeSync, + }); + + const block = await _eth_getBlockByNumber(rpc, { + blockNumber: 1, + }); + + const event = await onRealtimeSyncEvent({ + type: "block", + hasMatchedFilter: false, + block, + logs: [], + factoryLogs: [], + traces: [], + transactions: [], + transactionReceipts: [], + }); + + expect(event.type).toBe("block"); + + await cleanup(); +}); + +test("getPerChainOnRealtimeSyncEvent() handles finalize", async (context) => { + const { cleanup, database, syncStore } = await 
setupDatabaseServices(context); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -51,65 +192,259 @@ test("createSync()", async (context) => { rawIndexingFunctions, }); - const sync = await createSync({ + const intervalsCache = new Map< + Filter, + { fragment: Fragment; intervals: Interval[] }[] + >(); + for (const source of sources) { + for (const { fragment } of getFragments(source.filter)) { + intervalsCache.set(source.filter, [{ fragment, intervals: [] }]); + } + } + + // finalized block: 0 + + await testClient.mine({ blocks: 1 }); + + const syncProgress = await getLocalSyncProgress({ common: context.common, - indexingBuild: { sources, networks: [network] }, - syncStore, - onRealtimeEvent: async () => {}, + sources, + chain, + rpc, + intervalsCache, + }); + + const realtimeSync = createRealtimeSync({ + common: context.common, + chain, + sources, + rpc, + onEvent: async () => {}, onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), }); - expect(sync).toBeDefined(); + const onRealtimeSyncEvent = getPerChainOnRealtimeSyncEvent({ + common: context.common, + chain, + sources, + syncStore, + syncProgress, + realtimeSync, + }); - await sync.kill(); + const block = await _eth_getBlockByNumber(rpc, { + blockNumber: 1, + }); + + await onRealtimeSyncEvent({ + type: "block", + hasMatchedFilter: true, + block, + logs: [], + factoryLogs: [], + traces: [], + transactions: [], + transactionReceipts: [], + }); + + const event = await onRealtimeSyncEvent({ + type: "finalize", + block, + }); + + expect(event.type).toBe("finalize"); + + const blocks = await database.qb.sync + .selectFrom("blocks") + .selectAll() + .execute(); + + expect(blocks).toHaveLength(1); + + const intervals = await database.qb.sync + .selectFrom("intervals") + .selectAll() + .execute(); + + expect(intervals).toMatchInlineSnapshot(` + [ + { + "blocks": "{[0,2]}", + "chain_id": 1, + "fragment_id": "block_1_1_0", + }, + ] + `); await cleanup(); }); -test("getEvents() returns events", async (context) => { +test("getPerChainOnRealtimeSyncEvent() kills realtime when finalized", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - - const network = getNetwork(); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, }); + + // @ts-ignore + config.blocks.Blocks.endBlock = 1; + const { sources } = await buildConfigAndIndexingFunctions({ config, rawIndexingFunctions, }); + const intervalsCache = new Map< + Filter, + { fragment: Fragment; intervals: Interval[] }[] + >(); + for (const source of sources) { + for (const { fragment } of getFragments(source.filter)) { + intervalsCache.set(source.filter, [{ fragment, intervals: [] }]); + } + } + + // finalized block: 0 + await testClient.mine({ blocks: 1 }); - // finalized block: 1 - network.finalityBlockCount = 0; + const syncProgress = await getLocalSyncProgress({ + common: context.common, + sources, + chain, + rpc, + intervalsCache, + }); - const sync = await createSync({ + const realtimeSync = createRealtimeSync({ + common: context.common, + chain, + sources, + rpc, + onEvent: async () => {}, + onFatalError: () => {}, + }); + + const onRealtimeSyncEvent = getPerChainOnRealtimeSyncEvent({ + common: context.common, + chain, + sources, syncStore, + syncProgress, + realtimeSync, + 
}); + + const block = await _eth_getBlockByNumber(rpc, { + blockNumber: 1, + }); + + await onRealtimeSyncEvent({ + type: "block", + hasMatchedFilter: false, + block, + logs: [], + factoryLogs: [], + traces: [], + transactions: [], + transactionReceipts: [], + }); + + const spy = vi.spyOn(realtimeSync, "kill"); + + await onRealtimeSyncEvent({ + type: "finalize", + block, + }); + + expect(spy).toHaveBeenCalled(); + + await cleanup(); +}); + +test("getPerChainOnRealtimeSyncEvent() handles reorg", async (context) => { + const { cleanup, syncStore } = await setupDatabaseServices(context); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + }); + + const intervalsCache = new Map< + Filter, + { fragment: Fragment; intervals: Interval[] }[] + >(); + for (const source of sources) { + for (const { fragment } of getFragments(source.filter)) { + intervalsCache.set(source.filter, [{ fragment, intervals: [] }]); + } + } + + // finalized block: 0 + + await testClient.mine({ blocks: 1 }); + + const syncProgress = await getLocalSyncProgress({ common: context.common, - indexingBuild: { sources, networks: [network] }, + sources, + chain, + rpc, + intervalsCache, + }); - onRealtimeEvent: async () => {}, + const realtimeSync = createRealtimeSync({ + common: context.common, + chain, + sources, + rpc, + onEvent: async () => {}, onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), }); - const events = await drainAsyncGenerator(sync.getEvents()); + const onRealtimeSyncEvent = getPerChainOnRealtimeSyncEvent({ + common: context.common, + chain, + sources, + syncStore, + syncProgress, + realtimeSync, + }); - expect(events).toBeDefined(); - expect(events).toHaveLength(2); + const block = await _eth_getBlockByNumber(rpc, { + blockNumber: 1, + }); - await sync.kill(); + await onRealtimeSyncEvent({ + type: "block", + hasMatchedFilter: true, + block, + logs: [], + factoryLogs: [], + traces: [], + transactions: [], + transactionReceipts: [], + }); + + const event = await onRealtimeSyncEvent({ + type: "reorg", + block, + reorgedBlocks: [block], + }); + + expect(event.type).toBe("reorg"); await cleanup(); }); -test("getEvents() with cache", async (context) => { +test("getLocalEventGenerator()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - - const network = getNetwork(); + const chain = getChain(); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -122,50 +457,171 @@ test("getEvents() with cache", async (context) => { await testClient.mine({ blocks: 1 }); // finalized block: 1 - network.finalityBlockCount = 0; + chain.finalityBlockCount = 0; - let sync = await createSync({ + const historicalSync = await createHistoricalSync({ + common: context.common, + chain, syncStore, + sources, + rpc: createRpc({ chain, common: context.common }), + onFatalError: () => {}, + }); + const syncProgress = await getLocalSyncProgress({ common: context.common, - indexingBuild: { sources, networks: [network] }, + sources, + chain, + rpc: createRpc({ chain, common: context.common }), + intervalsCache: historicalSync.intervalsCache, + }); - onRealtimeEvent: async () => {}, - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + const syncGenerator = getLocalSyncGenerator({ + 
common: context.common, + chain, + syncProgress, + historicalSync, }); - await drainAsyncGenerator(sync.getEvents()); + const eventGenerator = getLocalEventGenerator({ + common: context.common, + chain, + syncStore, + sources, + localSyncGenerator: syncGenerator, + from: getChainCheckpoint({ syncProgress, chain, tag: "start" })!, + to: getChainCheckpoint({ syncProgress, chain, tag: "finalized" })!, + limit: 100, + }); + + const events = await drainAsyncGenerator(eventGenerator); + expect(events).toHaveLength(1); + + await cleanup(); +}); + +test("getLocalEventGenerator() pagination", async (context) => { + const { cleanup, syncStore } = await setupDatabaseServices(context); + const chain = getChain(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + }); + + await testClient.mine({ blocks: 2 }); - const spy = vi.spyOn(syncStore, "insertIntervals"); + // finalized block: 2 + chain.finalityBlockCount = 0; - sync = await createSync({ + const historicalSync = await createHistoricalSync({ + common: context.common, + chain, syncStore, + sources, + rpc: createRpc({ chain, common: context.common }), + onFatalError: () => {}, + }); + const syncProgress = await getLocalSyncProgress({ common: context.common, - indexingBuild: { sources, networks: [network] }, + sources, + chain, + rpc: createRpc({ chain, common: context.common }), + intervalsCache: historicalSync.intervalsCache, + }); - onRealtimeEvent: async () => {}, + const syncGenerator = getLocalSyncGenerator({ + common: context.common, + chain, + syncProgress, + historicalSync, + }); + + const eventGenerator = getLocalEventGenerator({ + common: context.common, + chain, + syncStore, + sources, + localSyncGenerator: syncGenerator, + from: getChainCheckpoint({ syncProgress, chain, tag: "start" })!, + to: getChainCheckpoint({ syncProgress, chain, tag: "finalized" })!, + limit: 1, + }); + + const events = await drainAsyncGenerator(eventGenerator); + expect(events.length).toBeGreaterThan(1); + + await cleanup(); +}); + +test("getLocalSyncGenerator()", async (context) => { + const { cleanup, database, syncStore } = await setupDatabaseServices(context); + const chain = getChain(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + }); + + await testClient.mine({ blocks: 1 }); + + // finalized block: 1 + chain.finalityBlockCount = 0; + + const historicalSync = await createHistoricalSync({ + common: context.common, + chain, + syncStore, + sources, + rpc: createRpc({ chain, common: context.common }), onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), }); - const events = await drainAsyncGenerator(sync.getEvents()); + const syncProgress = await getLocalSyncProgress({ + common: context.common, + sources, + chain, + rpc: createRpc({ chain, common: context.common }), + intervalsCache: historicalSync.intervalsCache, + }); - expect(spy).toHaveBeenCalledTimes(0); + const syncGenerator = getLocalSyncGenerator({ + common: context.common, + chain, + syncProgress, + historicalSync, + }); - expect(events).toBeDefined(); - expect(events).toHaveLength(2); + await drainAsyncGenerator(syncGenerator); - await sync.kill(); + const intervals = await database.qb.sync + .selectFrom("intervals") + .selectAll() + .execute(); + + 
expect(intervals).toMatchInlineSnapshot(` + [ + { + "blocks": "{[0,2]}", + "chain_id": 1, + "fragment_id": "block_1_1_0", + }, + ] + `); await cleanup(); }); -test("getEvents() end block", async (context) => { - const { cleanup, syncStore } = await setupDatabaseServices(context); - - const network = getNetwork(); +test("getLocalSyncGenerator() with partial cache", async (context) => { + const { cleanup, database, syncStore } = await setupDatabaseServices(context); + const chain = getChain(); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -175,97 +631,166 @@ test("getEvents() end block", async (context) => { rawIndexingFunctions, }); - await testClient.mine({ blocks: 2 }); - - // finalized block: 2 - network.finalityBlockCount = 0; + await testClient.mine({ blocks: 1 }); - sources[0]!.filter.toBlock = 1; + // finalized block: 1 + chain.finalityBlockCount = 0; - const sync = await createSync({ + let historicalSync = await createHistoricalSync({ + common: context.common, + chain, syncStore, + sources, + rpc: createRpc({ chain, common: context.common }), + onFatalError: () => {}, + }); + let syncProgress = await getLocalSyncProgress({ common: context.common, - indexingBuild: { sources, networks: [network] }, + sources, + chain, + rpc: createRpc({ chain, common: context.common }), + intervalsCache: historicalSync.intervalsCache, + }); - onRealtimeEvent: async () => {}, + let syncGenerator = getLocalSyncGenerator({ + common: context.common, + chain, + syncProgress, + historicalSync, + }); + + await drainAsyncGenerator(syncGenerator); + + await testClient.mine({ blocks: 1 }); + + historicalSync = await createHistoricalSync({ + common: context.common, + chain, + syncStore, + sources, + rpc: createRpc({ chain, common: context.common }), onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), }); - const events = await drainAsyncGenerator(sync.getEvents()); + syncProgress = await getLocalSyncProgress({ + common: context.common, + sources, + chain, + rpc: createRpc({ chain, common: context.common }), + intervalsCache: historicalSync.intervalsCache, + }); - expect(events).toBeDefined(); - expect(events).toHaveLength(2); + syncGenerator = getLocalSyncGenerator({ + common: context.common, + chain, + syncProgress, + historicalSync, + }); - await sync.kill(); + await drainAsyncGenerator(syncGenerator); + + const intervals = await database.qb.sync + .selectFrom("intervals") + .selectAll() + .execute(); + + expect(intervals).toMatchInlineSnapshot(` + [ + { + "blocks": "{[0,3]}", + "chain_id": 1, + "fragment_id": "block_1_1_0", + }, + ] + `); await cleanup(); }); -// TODO(kyle) This test is skipped because it causes a flake on ci. -// Our test setup is unable to properly mock a multichain environment -// The chain data of the chains in "network" is exactly the same. -// This test will fail when `sources[1]` finishes before `sources[0]`, because -// the `onConflictDoNothing` in `insertBlocks` causes the block with relavant data -// not to be added to the store. This test should be un-skipped when 1) we can mock -// multichain enviroments, and 2) when our sync-store is robust enough to handle -// multiple blocks with the same hash and different chain IDs. 
-test.skip("getEvents() multichain", async (context) => { +test("getLocalSyncGenerator() with full cache", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + }); - const { sources: sources1, networks: networks1 } = - await buildConfigAndIndexingFunctions({ - config, - rawIndexingFunctions, - }); + await testClient.mine({ blocks: 1 }); - const { sources: sources2, networks: networks2 } = - await buildConfigAndIndexingFunctions({ - config, - rawIndexingFunctions, - }); + // finalized block: 1 + chain.finalityBlockCount = 0; - await testClient.mine({ blocks: 2 }); + let historicalSync = await createHistoricalSync({ + common: context.common, + chain, + syncStore, + sources, + rpc, + onFatalError: () => {}, + }); - // finalized block: 2 - networks1[0]!.finalityBlockCount = 0; - networks2[0]!.finalityBlockCount = 0; + let syncProgress = await getLocalSyncProgress({ + common: context.common, + sources, + chain, + rpc: createRpc({ chain, common: context.common }), + intervalsCache: historicalSync.intervalsCache, + }); - sources2[0]!.filter.chainId = 2; - sources2[0]!.filter.toBlock = 1; - networks2[0]!.chainId = 2; + let syncGenerator = getLocalSyncGenerator({ + common: context.common, + chain, + syncProgress, + historicalSync, + }); - const sync = await createSync({ - syncStore, - indexingBuild: { - sources: [...sources1, ...sources2], - networks: [...networks1, ...networks2], - }, + await drainAsyncGenerator(syncGenerator); + + historicalSync = await createHistoricalSync({ common: context.common, - onRealtimeEvent: async () => {}, + chain, + syncStore, + sources, + rpc: createRpc({ chain, common: context.common }), onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), }); - const events = await drainAsyncGenerator(sync.getEvents()); + syncProgress = await getLocalSyncProgress({ + common: context.common, + sources, + chain, + rpc: createRpc({ chain, common: context.common }), + intervalsCache: historicalSync.intervalsCache, + }); - expect(events).toBeDefined(); - expect(events).toHaveLength(1); + syncGenerator = getLocalSyncGenerator({ + common: context.common, + chain, + syncProgress, + historicalSync, + }); - await sync.kill(); + const insertSpy = vi.spyOn(syncStore, "insertIntervals"); + const requestSpy = vi.spyOn(rpc, "request"); + + const checkpoints = await drainAsyncGenerator(syncGenerator); + expect(checkpoints).toHaveLength(1); + + expect(insertSpy).toHaveBeenCalledTimes(0); + expect(requestSpy).toHaveBeenCalledTimes(0); await cleanup(); }); -test("getEvents() updates status", async (context) => { - const { cleanup, syncStore } = await setupDatabaseServices(context); - - const network = getNetwork(); +test("getLocalSyncProgress()", async (context) => { + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -275,38 +800,357 @@ test("getEvents() updates status", async (context) => { rawIndexingFunctions, }); - await testClient.mine({ blocks: 2 }); + const intervalsCache = new Map< + Filter, + { fragment: Fragment; intervals: Interval[] }[] + >(); + for (const source of sources) { + for (const { fragment } of 
getFragments(source.filter)) { + intervalsCache.set(source.filter, [{ fragment, intervals: [] }]); + } + } - // finalized block: 2 - network.finalityBlockCount = 0; + const syncProgress = await getLocalSyncProgress({ + common: context.common, + sources, + chain, + rpc, + intervalsCache, + }); - const sync = await createSync({ - syncStore, + expect(syncProgress.finalized.number).toBe("0x0"); + expect(syncProgress.start.number).toBe("0x0"); + expect(syncProgress.end).toBe(undefined); + expect(syncProgress.current).toBe(undefined); +}); +test("getLocalSyncProgress() future end block", async (context) => { + const chain = getChain(); + const rpc = createRpc({ chain, common: context.common }); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + + // @ts-ignore + config.blocks.Blocks.endBlock = 12; + + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + }); + + const intervalsCache = new Map< + Filter, + { fragment: Fragment; intervals: Interval[] }[] + >(); + for (const source of sources) { + for (const { fragment } of getFragments(source.filter)) { + intervalsCache.set(source.filter, [{ fragment, intervals: [] }]); + } + } + + const syncProgress = await getLocalSyncProgress({ common: context.common, - indexingBuild: { sources, networks: [network] }, + sources, + chain, + rpc, + intervalsCache, + }); - onRealtimeEvent: async () => {}, - onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + expect(syncProgress.finalized.number).toBe("0x0"); + expect(syncProgress.start.number).toBe("0x0"); + expect(syncProgress.end).toMatchInlineSnapshot(` + { + "hash": "0x", + "number": "0xc", + "parentHash": "0x", + "timestamp": "0x2540be3ff", + } + `); + expect(syncProgress.current).toBe(undefined); +}); + +test("getCachedBlock() no cached intervals", async () => { + const filter = { + type: "block", + chainId: 1, + interval: 1, + offset: 0, + fromBlock: 0, + toBlock: 100, + include: [], + } satisfies BlockFilter; + + const intervalsCache = new Map< + Filter, + { fragment: Fragment; intervals: Interval[] }[] + >([[filter, []]]); + + const cachedBlock = getCachedBlock({ + filters: [filter], + intervalsCache, }); - await drainAsyncGenerator(sync.getEvents()); + expect(cachedBlock).toBe(undefined); +}); - const status = sync.getStatus(); +test("getCachedBlock() with cache", async () => { + const filter = { + type: "block", + chainId: 1, + interval: 1, + offset: 0, + fromBlock: 0, + toBlock: 100, + include: [], + } satisfies BlockFilter; + + let intervalsCache = new Map< + Filter, + { fragment: Fragment; intervals: Interval[] }[] + >([[filter, [{ fragment: {} as Fragment, intervals: [[0, 24]] }]]]); + + let cachedBlock = getCachedBlock({ + filters: [filter], + intervalsCache, + }); + + expect(cachedBlock).toBe(24); + + intervalsCache = new Map< + Filter, + { fragment: Fragment; intervals: Interval[] }[] + >([ + [ + filter, + [ + { + fragment: {} as Fragment, + intervals: [ + [0, 50], + [50, 102], + ], + }, + ], + ], + ]); + + cachedBlock = getCachedBlock({ + filters: [filter], + intervalsCache, + }); + + expect(cachedBlock).toBe(100); +}); + +test("getCachedBlock() with incomplete cache", async () => { + const filter = { + type: "block", + chainId: 1, + interval: 1, + offset: 0, + fromBlock: 0, + toBlock: 100, + include: [], + } satisfies BlockFilter; + + const intervalsCache = new Map< + Filter, + { fragment: Fragment; intervals: Interval[] }[] + >([[filter, [{ fragment: {} as Fragment, 
intervals: [[1, 24]] }]]]); + + const cachedBlock = getCachedBlock({ + filters: [filter], + intervalsCache, + }); + + expect(cachedBlock).toBeUndefined(); +}); + +test("getCachedBlock() with multiple filters", async () => { + const filters = [ + { + type: "block", + chainId: 1, + interval: 1, + offset: 0, + fromBlock: 0, + toBlock: 100, + include: [], + }, + { + type: "block", + chainId: 1, + interval: 1, + offset: 1, + fromBlock: 50, + toBlock: 150, + include: [], + }, + ] satisfies BlockFilter[]; + + let intervalsCache = new Map< + Filter, + { fragment: Fragment; intervals: Interval[] }[] + >([ + [filters[0]!, [{ fragment: {} as Fragment, intervals: [[0, 24]] }]], + [filters[1]!, []], + ]); + + let cachedBlock = getCachedBlock({ + filters, + intervalsCache, + }); + + expect(cachedBlock).toBe(24); + + intervalsCache = new Map< + Filter, + { fragment: Fragment; intervals: Interval[] }[] + >([ + [filters[0]!, [{ fragment: {} as Fragment, intervals: [[0, 24]] }]], + [filters[1]!, [{ fragment: {} as Fragment, intervals: [[50, 102]] }]], + ]); + + cachedBlock = getCachedBlock({ + filters, + intervalsCache, + }); + + expect(cachedBlock).toBe(24); +}); + +test("mergeAsyncGeneratorsWithEventOrder()", async () => { + const p1 = promiseWithResolvers<{ events: RawEvent[]; checkpoint: string }>(); + const p2 = promiseWithResolvers<{ events: RawEvent[]; checkpoint: string }>(); + const p3 = promiseWithResolvers<{ events: RawEvent[]; checkpoint: string }>(); + const p4 = promiseWithResolvers<{ events: RawEvent[]; checkpoint: string }>(); + + async function* generator1() { + yield await p1.promise; + yield await p2.promise; + } + + async function* generator2() { + yield await p3.promise; + yield await p4.promise; + } + + const results: { events: RawEvent[]; checkpoint: string }[] = []; + const generator = mergeAsyncGeneratorsWithEventOrder([ + generator1(), + generator2(), + ]); + + (async () => { + for await (const result of generator) { + results.push(result); + } + })(); + + p1.resolve({ + events: [{ checkpoint: "01" }, { checkpoint: "07" }] as RawEvent[], + checkpoint: "10", + }); + p3.resolve({ + events: [{ checkpoint: "02" }, { checkpoint: "05" }] as RawEvent[], + checkpoint: "06", + }); + + await new Promise((res) => setTimeout(res)); + + p4.resolve({ + events: [{ checkpoint: "08" }, { checkpoint: "11" }] as RawEvent[], + checkpoint: "20", + }); + p2.resolve({ + events: [{ checkpoint: "08" }, { checkpoint: "13" }] as RawEvent[], + checkpoint: "20", + }); - expect(status[network.name]?.ready).toBe(false); - expect(status[network.name]?.block?.number).toBe(2); + await new Promise((res) => setTimeout(res)); + + expect(results).toMatchInlineSnapshot(` + [ + { + "checkpoint": "06", + "events": [ + { + "checkpoint": "01", + }, + { + "checkpoint": "02", + }, + { + "checkpoint": "05", + }, + ], + }, + { + "checkpoint": "10", + "events": [ + { + "checkpoint": "07", + }, + { + "checkpoint": "08", + }, + ], + }, + { + "checkpoint": "20", + "events": [ + { + "checkpoint": "08", + }, + { + "checkpoint": "11", + }, + { + "checkpoint": "13", + }, + ], + }, + ] + `); +}); + +test("createSync()", async (context) => { + const { cleanup, syncStore } = await setupDatabaseServices(context); + + const chain = getChain(); + + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ + interval: 1, + }); + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + }); + + const sync = await createSync({ + common: context.common, + indexingBuild: { sources, 
chains: [chain] }, + rpcs: [createRpc({ chain, common: context.common })], + syncStore, + onRealtimeEvent: async () => {}, + onFatalError: () => {}, + initialCheckpoint: ZERO_CHECKPOINT_STRING, + mode: "multichain", + }); + + expect(sync).toBeDefined(); await sync.kill(); await cleanup(); }); -test("getEvents() pagination", async (context) => { +test("getEvents() multichain", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); + const chain = getChain(); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -316,36 +1160,38 @@ test("getEvents() pagination", async (context) => { rawIndexingFunctions, }); - await testClient.mine({ blocks: 2 }); - - // finalized block: 2 - network.finalityBlockCount = 0; + await testClient.mine({ blocks: 1 }); - context.common.options.syncEventsQuerySize = 1; + // finalized block: 1 + chain.finalityBlockCount = 0; const sync = await createSync({ syncStore, - common: context.common, - indexingBuild: { sources, networks: [network] }, - + indexingBuild: { sources, chains: [chain] }, + rpcs: [createRpc({ chain, common: context.common })], onRealtimeEvent: async () => {}, onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + initialCheckpoint: ZERO_CHECKPOINT_STRING, + mode: "multichain", }); - const events = await drainAsyncGenerator(sync.getEvents()); - expect(events).toHaveLength(3); + const events = await drainAsyncGenerator(sync.getEvents()).then((events) => + events.flat(), + ); + + expect(events).toBeDefined(); + expect(events).toHaveLength(2); await sync.kill(); await cleanup(); }); -test("getEvents() initialCheckpoint", async (context) => { +test("getEvents() omnichain", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); + const chain = getChain(); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -355,36 +1201,38 @@ test("getEvents() initialCheckpoint", async (context) => { rawIndexingFunctions, }); - await testClient.mine({ blocks: 2 }); + await testClient.mine({ blocks: 1 }); - // finalized block: 2 - network.finalityBlockCount = 0; + // finalized block: 1 + chain.finalityBlockCount = 0; const sync = await createSync({ syncStore, - common: context.common, - indexingBuild: { sources, networks: [network] }, - + indexingBuild: { sources, chains: [chain] }, + rpcs: [createRpc({ chain, common: context.common })], onRealtimeEvent: async () => {}, onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(maxCheckpoint), + initialCheckpoint: ZERO_CHECKPOINT_STRING, + mode: "omnichain", }); - const events = await drainAsyncGenerator(sync.getEvents()); + const events = await drainAsyncGenerator(sync.getEvents()).then((events) => + events.flat(), + ); expect(events).toBeDefined(); - expect(events).toHaveLength(0); + expect(events).toHaveLength(2); await sync.kill(); await cleanup(); }); -test("getEvents() refetches finalized block", async (context) => { +test("getEvents() multichain updates status", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); + const chain = getChain(); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -397,37 +1245,36 @@ test("getEvents() refetches finalized block", async (context) => { await testClient.mine({ blocks: 2 }); // finalized block: 2 -
network.finalityBlockCount = 0; - - context.common.options.syncHandoffStaleSeconds = 0.5; + chain.finalityBlockCount = 0; const sync = await createSync({ syncStore, common: context.common, - indexingBuild: { sources, networks: [network] }, - + indexingBuild: { sources, chains: [chain] }, + rpcs: [createRpc({ chain, common: context.common })], onRealtimeEvent: async () => {}, onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(maxCheckpoint), + initialCheckpoint: ZERO_CHECKPOINT_STRING, + mode: "multichain", }); - // cause `latestFinalizedFetch` to be updated - const gen = sync.getEvents(); + await drainAsyncGenerator(sync.getEvents()); - await wait(1000); + const status = sync.getStatus(); - await drainAsyncGenerator(gen); + expect(status[chain.chain.name]?.ready).toBe(false); + expect(status[chain.chain.name]?.block?.number).toBe(2); await sync.kill(); await cleanup(); }); -test("startRealtime()", async (context) => { +test("getEvents() omnichain updates status", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); + const chain = getChain(); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -439,35 +1286,37 @@ test("startRealtime()", async (context) => { await testClient.mine({ blocks: 2 }); + // finalized block: 2 + chain.finalityBlockCount = 0; + const sync = await createSync({ syncStore, common: context.common, - indexingBuild: { sources, networks: [network] }, - + indexingBuild: { sources, chains: [chain] }, + rpcs: [createRpc({ chain, common: context.common })], onRealtimeEvent: async () => {}, onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + initialCheckpoint: ZERO_CHECKPOINT_STRING, + mode: "multichain", }); await drainAsyncGenerator(sync.getEvents()); - await sync.startRealtime(); - const status = sync.getStatus(); - expect(status[network.name]?.ready).toBe(true); - expect(status[network.name]?.block?.number).toBe(1); + expect(status[chain.chain.name]?.ready).toBe(false); + expect(status[chain.chain.name]?.block?.number).toBe(2); await sync.kill(); await cleanup(); }); -test("onEvent() handles block", async (context) => { +test("getEvents() with initial checkpoint", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); + const chain = getChain(); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -477,44 +1326,39 @@ test("onEvent() handles block", async (context) => { rawIndexingFunctions, }); - const promise = promiseWithResolvers(); - const events: RawEvent[] = []; + await testClient.mine({ blocks: 2 }); - await testClient.mine({ blocks: 1 }); + // finalized block: 2 + chain.finalityBlockCount = 0; const sync = await createSync({ syncStore, common: context.common, - indexingBuild: { sources, networks: [network] }, - - onRealtimeEvent: async (event) => { - if (event.type === "block") { - events.push(...event.events); - promise.resolve(); - } - }, + indexingBuild: { sources, chains: [chain] }, + rpcs: [createRpc({ chain, common: context.common })], + onRealtimeEvent: async () => {}, onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + initialCheckpoint: MAX_CHECKPOINT_STRING, + mode: "multichain", }); - await drainAsyncGenerator(sync.getEvents()); - - await sync.startRealtime(); - - await promise.promise; + const events = await drainAsyncGenerator(sync.getEvents()).then((events) => + 
events.flat(), + ); - expect(events).toHaveLength(1); + expect(events).toBeDefined(); + expect(events).toHaveLength(0); await sync.kill(); await cleanup(); }); -test("onEvent() handles finalize", async (context) => { +test("startRealtime()", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); + const chain = getChain(); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -524,113 +1368,95 @@ test("onEvent() handles finalize", async (context) => { rawIndexingFunctions, }); - const promise = promiseWithResolvers(); - let checkpoint: string; - - // finalized block: 0 - - network.finalityBlockCount = 2; + await testClient.mine({ blocks: 2 }); const sync = await createSync({ syncStore, common: context.common, - indexingBuild: { sources, networks: [network] }, - - onRealtimeEvent: async (event) => { - if (event.type === "finalize") { - checkpoint = event.checkpoint; - promise.resolve(); - } - }, + indexingBuild: { sources, chains: [chain] }, + rpcs: [createRpc({ chain, common: context.common })], + onRealtimeEvent: async () => {}, onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + initialCheckpoint: ZERO_CHECKPOINT_STRING, + mode: "multichain", }); - await testClient.mine({ blocks: 4 }); - await drainAsyncGenerator(sync.getEvents()); await sync.startRealtime(); - await promise.promise; + const status = sync.getStatus(); - expect(decodeCheckpoint(checkpoint!).blockNumber).toBe(2n); + expect(status[chain.chain.name]?.ready).toBe(true); + expect(status[chain.chain.name]?.block?.number).toBe(1); await sync.kill(); await cleanup(); }); -test.todo("onEvent() handles reorg"); - -test("onEvent() multichain gets all events", async (context) => { +test("onEvent() multichain handles block", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); + const chain = getChain(); + const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, }); - const { sources: sources1, networks: networks1 } = - await buildConfigAndIndexingFunctions({ - config, - rawIndexingFunctions, - }); - - const { sources: sources2, networks: networks2 } = - await buildConfigAndIndexingFunctions({ - config, - rawIndexingFunctions, - }); - - // finalized block: 0 - - sources2[0]!.filter.chainId = 2; - networks2[0]!.chainId = 2; + const { sources } = await buildConfigAndIndexingFunctions({ + config, + rawIndexingFunctions, + }); const promise = promiseWithResolvers(); + const events: RawEvent[] = []; + + await testClient.mine({ blocks: 1 }); const sync = await createSync({ - common: context.common, - indexingBuild: { - sources: [...sources1, ...sources2], - networks: [...networks1, ...networks2], - }, syncStore, + + common: context.common, + indexingBuild: { sources, chains: [chain] }, + rpcs: [createRpc({ chain, common: context.common })], onRealtimeEvent: async (event) => { if (event.type === "block") { + events.push(...event.events); promise.resolve(); } }, onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + initialCheckpoint: ZERO_CHECKPOINT_STRING, + mode: "multichain", }); - await testClient.mine({ blocks: 1 }); - await drainAsyncGenerator(sync.getEvents()); await sync.startRealtime(); await promise.promise; + expect(events).toHaveLength(1); + await sync.kill(); await cleanup(); }); -test("onEvent() multichain end block", async (context) => { +test("onEvent() omnichain handles block", async (context) 
=> { const { cleanup, syncStore } = await setupDatabaseServices(context); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, }); - const { sources: sources1, networks: networks1 } = + const { sources: sources1, chains: chains1 } = await buildConfigAndIndexingFunctions({ config, rawIndexingFunctions, }); - const { sources: sources2, networks: networks2 } = + const { sources: sources2, chains: chains2 } = await buildConfigAndIndexingFunctions({ config, rawIndexingFunctions, @@ -639,8 +1465,7 @@ test("onEvent() multichain end block", async (context) => { // finalized block: 0 sources2[0]!.filter.chainId = 2; - sources2[0]!.filter.toBlock = 0; - networks2[0]!.chainId = 2; + chains2[0]!.chain.id = 2; const promise = promiseWithResolvers(); @@ -648,8 +1473,12 @@ test("onEvent() multichain end block", async (context) => { common: context.common, indexingBuild: { sources: [...sources1, ...sources2], - networks: [...networks1, ...networks2], + chains: [...chains1, ...chains2], }, + rpcs: [ + createRpc({ chain: chains1[0]!, common: context.common }), + createRpc({ chain: chains2[0]!, common: context.common }), + ], syncStore, onRealtimeEvent: async (event) => { if (event.type === "block") { @@ -657,7 +1486,8 @@ test("onEvent() multichain end block", async (context) => { } }, onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + initialCheckpoint: ZERO_CHECKPOINT_STRING, + mode: "omnichain", }); await testClient.mine({ blocks: 1 }); @@ -673,10 +1503,10 @@ test("onEvent() multichain end block", async (context) => { await cleanup(); }); -test("onEvent() handles endBlock finalization", async (context) => { +test("onEvent() handles finalize", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); + const chain = getChain(); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -687,31 +1517,30 @@ test("onEvent() handles endBlock finalization", async (context) => { }); const promise = promiseWithResolvers(); + let checkpoint: string; // finalized block: 0 - await testClient.mine({ blocks: 2 }); - - network.finalityBlockCount = 2; - - sources[0]!.filter.toBlock = 1; + chain.finalityBlockCount = 2; const sync = await createSync({ syncStore, common: context.common, - indexingBuild: { sources, networks: [network] }, - + indexingBuild: { sources, chains: [chain] }, + rpcs: [createRpc({ chain, common: context.common })], onRealtimeEvent: async (event) => { if (event.type === "finalize") { + checkpoint = event.checkpoint; promise.resolve(); } }, onFatalError: () => {}, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + initialCheckpoint: ZERO_CHECKPOINT_STRING, + mode: "multichain", }); - await testClient.mine({ blocks: 2 }); + await testClient.mine({ blocks: 4 }); await drainAsyncGenerator(sync.getEvents()); @@ -719,15 +1548,19 @@ test("onEvent() handles endBlock finalization", async (context) => { await promise.promise; + expect(decodeCheckpoint(checkpoint!).blockNumber).toBe(2n); + await sync.kill(); await cleanup(); }); +test.todo("onEvent() handles reorg"); + test("onEvent() handles errors", async (context) => { const { cleanup, syncStore } = await setupDatabaseServices(context); - const network = getNetwork(); + const chain = getChain(); const { config, rawIndexingFunctions } = getBlocksConfigAndIndexingFunctions({ interval: 1, @@ -745,13 +1578,14 @@ test("onEvent() handles errors", async (context) => { syncStore, common: 
context.common, - indexingBuild: { sources, networks: [network] }, - + indexingBuild: { sources, chains: [chain] }, + rpcs: [createRpc({ chain, common: context.common })], onRealtimeEvent: async () => {}, onFatalError: () => { promise.resolve(); }, - initialCheckpoint: encodeCheckpoint(zeroCheckpoint), + initialCheckpoint: ZERO_CHECKPOINT_STRING, + mode: "multichain", }); await testClient.mine({ blocks: 4 }); diff --git a/packages/core/src/sync/index.ts b/packages/core/src/sync/index.ts index 5f727e43e..be3d44738 100644 --- a/packages/core/src/sync/index.ts +++ b/packages/core/src/sync/index.ts @@ -1,12 +1,15 @@ import type { Common } from "@/internal/common.js"; import type { + Chain, Factory, + Filter, IndexingBuild, - Network, RawEvent, + Seconds, Source, Status, } from "@/internal/types.js"; +import type { RPC } from "@/rpc/index.js"; import { type HistoricalSync, createHistoricalSync, @@ -20,14 +23,19 @@ import type { SyncStore } from "@/sync-store/index.js"; import type { LightBlock, SyncBlock } from "@/types/sync.js"; import { type Checkpoint, + MAX_CHECKPOINT, + ZERO_CHECKPOINT, + ZERO_CHECKPOINT_STRING, decodeCheckpoint, encodeCheckpoint, - maxCheckpoint, - zeroCheckpoint, + min, } from "@/utils/checkpoint.js"; import { estimate } from "@/utils/estimate.js"; -import { formatEta, formatPercentage } from "@/utils/format.js"; -import { mergeAsyncGenerators } from "@/utils/generators.js"; +import { formatPercentage } from "@/utils/format.js"; +import { + bufferAsyncGenerator, + mergeAsyncGenerators, +} from "@/utils/generators.js"; import { type Interval, intervalDifference, @@ -38,29 +46,20 @@ import { } from "@/utils/interval.js"; import { intervalUnion } from "@/utils/interval.js"; import { never } from "@/utils/never.js"; -import { type RequestQueue, createRequestQueue } from "@/utils/requestQueue.js"; +import { partition } from "@/utils/partition.js"; +import { _eth_getBlockByNumber } from "@/utils/rpc.js"; import { startClock } from "@/utils/timer.js"; -import { type Queue, createQueue } from "@ponder/common"; -import { - type Address, - type Hash, - type Transport, - hexToBigInt, - hexToNumber, - toHex, -} from "viem"; -import { _eth_getBlockByNumber } from "../utils/rpc.js"; +import { zipperMany } from "@/utils/zipper.js"; +import { type Address, type Hash, hexToBigInt, hexToNumber, toHex } from "viem"; import { buildEvents } from "./events.js"; import { isAddressFactory } from "./filter.js"; -import { cachedTransport } from "./transport.js"; export type Sync = { - getEvents(): AsyncGenerator<{ events: RawEvent[]; checkpoint: string }>; + getEvents(): AsyncGenerator; startRealtime(): Promise; getStatus(): Status; - getStartCheckpoint(): string; + seconds: Seconds; getFinalizedCheckpoint(): string; - getCachedTransport(network: Network): Transport; kill(): Promise; }; @@ -83,7 +82,6 @@ export type RealtimeEvent = export type SyncProgress = { start: SyncBlock | LightBlock; end: SyncBlock | LightBlock | undefined; - cached: SyncBlock | LightBlock | undefined; current: SyncBlock | LightBlock | undefined; finalized: SyncBlock | LightBlock; }; @@ -107,7 +105,7 @@ export const blockToCheckpoint = ( rounding: "up" | "down", ): Checkpoint => { return { - ...(rounding === "up" ? maxCheckpoint : zeroCheckpoint), + ...(rounding === "up" ? 
MAX_CHECKPOINT : ZERO_CHECKPOINT), blockTimestamp: hexToNumber(block.timestamp), chainId: BigInt(chainId), blockNumber: hexToBigInt(block.number), @@ -153,16 +151,6 @@ const getHistoricalLast = ( : syncProgress.end; }; -/** Compute the minimum checkpoint, filtering out undefined */ -const min = (...checkpoints: (string | undefined)[]) => { - return checkpoints.reduce((acc, cur) => { - if (cur === undefined) return acc; - if (acc === undefined) return cur; - if (acc < cur) return acc; - return cur; - })!; -}; - export const splitEvents = ( events: RawEvent[], ): { checkpoint: string; events: RawEvent[] }[] => { @@ -173,7 +161,7 @@ export const splitEvents = ( if (prevHash === undefined || prevHash !== event.block.hash) { result.push({ checkpoint: encodeCheckpoint({ - ...maxCheckpoint, + ...MAX_CHECKPOINT, blockTimestamp: Number(event.block.timestamp), chainId: BigInt(event.chainId), blockNumber: event.block.number, @@ -192,11 +180,11 @@ export const splitEvents = ( /** Returns the checkpoint for a given block tag. */ export const getChainCheckpoint = ({ syncProgress, - network, + chain, tag, }: { syncProgress: SyncProgress; - network: Network; + chain: Chain; tag: "start" | "current" | "finalized" | "end"; }): string | undefined => { if (tag === "end" && syncProgress.end === undefined) { @@ -211,7 +199,7 @@ export const getChainCheckpoint = ({ return encodeCheckpoint( blockToCheckpoint( block, - network.chainId, + chain.chain.id, // The checkpoint returned by this function is meant to be used in // a closed interval (includes endpoints), so "start" should be inclusive. tag === "start" ? "down" : "up", @@ -219,156 +207,43 @@ export const getChainCheckpoint = ({ ); }; -type CreateSyncParameters = { +export const createSync = async (params: { common: Common; - indexingBuild: Pick; + indexingBuild: Pick; + rpcs: RPC[]; syncStore: SyncStore; onRealtimeEvent(event: RealtimeEvent): Promise; onFatalError(error: Error): void; initialCheckpoint: string; -}; - -export const createSync = async (args: CreateSyncParameters): Promise => { - const perNetworkSync = new Map< - Network, + mode: "omnichain" | "multichain"; +}): Promise => { + const perChainSync = new Map< + Chain, { - requestQueue: RequestQueue; syncProgress: SyncProgress; historicalSync: HistoricalSync; realtimeSync: RealtimeSync; - realtimeQueue: Queue; - unfinalizedBlocks: Omit< - Extract, - "type" - >[]; } >(); - /** Events that have been executed but not finalized. */ - let executedEvents: RawEvent[] = []; - /** Events that have not been executed yet. */ - let pendingEvents: RawEvent[] = []; - const status: Status = {}; - let isKilled = false; - // Realtime events across all chains that can't be passed to the parent function - // because the overall checkpoint hasn't caught up to the events yet. 
- - // Instantiate `localSyncData` and `status` - await Promise.all( - args.indexingBuild.networks.map(async (network) => { - const requestQueue = createRequestQueue({ - network, - common: args.common, - }); - const sources = args.indexingBuild.sources.filter( - ({ filter }) => filter.chainId === network.chainId, - ); - - const { start, end, finalized } = await syncDiagnostic({ - common: args.common, - sources, - requestQueue, - network, - }); - - // Invalidate sync cache for devnet sources - if (network.disableCache) { - args.common.logger.warn({ - service: "sync", - msg: `Deleting cache records for '${network.name}' from block ${hexToNumber(start.number)}`, - }); - - await args.syncStore.pruneByChain({ - fromBlock: hexToNumber(start.number), - chainId: network.chainId, - }); - } - - const historicalSync = await createHistoricalSync({ - common: args.common, - sources, - syncStore: args.syncStore, - requestQueue, - network, - onFatalError: args.onFatalError, - }); - - const realtimeQueue = createQueue({ - initialStart: true, - browser: false, - concurrency: 1, - worker: async (event: RealtimeSyncEvent) => - onRealtimeSyncEvent({ event, network }), - }); - - const realtimeSync = createRealtimeSync({ - common: args.common, - sources, - requestQueue, - network, - onEvent: (event) => - realtimeQueue.add(event).catch((error) => { - args.common.logger.error({ - service: "sync", - msg: `Fatal error: Unable to process ${event.type} event`, - error, - }); - args.onFatalError(error); - }), - onFatalError: args.onFatalError, - }); - - const cached = await getCachedBlock({ - sources, - requestQueue, - historicalSync, - }); - - // Update "ponder_sync_block" metric - if (cached !== undefined) { - args.common.metrics.ponder_sync_block.set( - { network: network.name }, - hexToNumber(cached.number), - ); - } - - const syncProgress: SyncProgress = { - start, - end, - finalized, - cached, - current: cached, - }; - - args.common.metrics.ponder_sync_is_realtime.set( - { network: network.name }, - 0, - ); - args.common.metrics.ponder_sync_is_complete.set( - { network: network.name }, - 0, - ); - perNetworkSync.set(network, { - requestQueue, - syncProgress, - historicalSync, - realtimeSync, - realtimeQueue, - unfinalizedBlocks: [], - }); - status[network.name] = { block: null, ready: false }; - }), - ); + const getMultichainCheckpoint = ({ + tag, + chain, + }: { tag: "start" | "end" | "current" | "finalized"; chain: Chain }): + | string + | undefined => { + const syncProgress = perChainSync.get(chain)!.syncProgress; + return getChainCheckpoint({ syncProgress, chain, tag }); + }; - /** - * Returns the minimum checkpoint across all chains. 
- */ - const getOmnichainCheckpoint = ( - tag: "start" | "end" | "current" | "finalized", - ): string | undefined => { - const checkpoints = Array.from(perNetworkSync.entries()).map( - ([network, { syncProgress }]) => - getChainCheckpoint({ syncProgress, network, tag }), + const getOmnichainCheckpoint = ({ + tag, + }: { tag: "start" | "end" | "current" | "finalized" }): + | string + | undefined => { + const checkpoints = Array.from(perChainSync.entries()).map( + ([chain, { syncProgress }]) => + getChainCheckpoint({ syncProgress, chain, tag }), ); if (tag === "end" && checkpoints.some((c) => c === undefined)) { @@ -385,611 +260,645 @@ export const createSync = async (args: CreateSyncParameters): Promise => { const updateHistoricalStatus = ({ events, checkpoint, - network, - }: { events: RawEvent[]; checkpoint: string; network: Network }) => { - if (Number(decodeCheckpoint(checkpoint).chainId) === network.chainId) { - status[network.name]!.block = { + chain, + }: { events: RawEvent[]; checkpoint: string; chain: Chain }) => { + if (Number(decodeCheckpoint(checkpoint).chainId) === chain.chain.id) { + status[chain.chain.id]!.block = { timestamp: decodeCheckpoint(checkpoint).blockTimestamp, number: Number(decodeCheckpoint(checkpoint).blockNumber), }; - } else { - let i = events.length - 1; - while (i >= 0) { - const event = events[i]!; - - if (network.chainId === event.chainId) { - status[network.name]!.block = { - timestamp: decodeCheckpoint(event.checkpoint).blockTimestamp, - number: Number(decodeCheckpoint(event.checkpoint).blockNumber), - }; - } + return; + } - i--; + let i = events.length - 1; + while (i >= 0) { + const event = events[i]!; + + if (chain.chain.id === event.chainId) { + status[chain.chain.name]!.block = { + timestamp: decodeCheckpoint(event.checkpoint).blockTimestamp, + number: Number(decodeCheckpoint(event.checkpoint).blockNumber), + }; + return; } + + i--; } }; const updateRealtimeStatus = ({ checkpoint, - network, - }: { - checkpoint: string; - network: Network; - }) => { - const localBlock = perNetworkSync - .get(network)! + chain, + }: { checkpoint: string; chain: Chain }) => { + const localBlock = perChainSync + .get(chain)! .realtimeSync.unfinalizedBlocks.findLast( (block) => - encodeCheckpoint(blockToCheckpoint(block, network.chainId, "up")) <= + encodeCheckpoint(blockToCheckpoint(block, chain.chain.id, "up")) <= checkpoint, ); if (localBlock !== undefined) { - status[network.name]!.block = { + status[chain.chain.id]!.block = { timestamp: hexToNumber(localBlock.timestamp), number: hexToNumber(localBlock.number), }; } }; - /** - * Estimate optimal range (seconds) to query at a time, eventually - * used to determine `to` passed to `getEvents` - */ - let estimateSeconds = 1_000; - /** - * Omnichain `getEvents` - * - * Extract all events across `args.networks` ordered by checkpoint. - * The generator is "completed" when all event have been extracted - * before the minimum finalized checkpoint (supremum). - * - * Note: `syncStore.getEvents` is used to order between multiple - * networks. This approach is not future proof. - */ async function* getEvents() { - let latestFinalizedFetch = Date.now(); - - /** - * Calculate start checkpoint, if `initialCheckpoint` is non-zero, - * use that. Otherwise, use `startBlock` - */ - const start = - args.initialCheckpoint !== encodeCheckpoint(zeroCheckpoint) - ? args.initialCheckpoint - : getOmnichainCheckpoint("start")!; - - // Cursor used to track progress. 
- let from = start; - - let showLogs = true; - while (true) { - const syncGenerator = mergeAsyncGenerators( - Array.from(perNetworkSync.entries()).map( - ([network, { syncProgress, historicalSync }]) => - localHistoricalSyncGenerator({ - common: args.common, - network, - syncProgress, - historicalSync, - showLogs, - }), - ), - ); - - // Only show logs on the first iteration - showLogs = false; - - for await (const _ of syncGenerator) { - /** - * `current` is used to calculate the `to` checkpoint, if any - * network hasn't yet ingested a block, run another iteration of this loop. - * It is an invariant that `latestBlock` will eventually be defined. - */ - if ( - Array.from(perNetworkSync.values()).some( - ({ syncProgress }) => syncProgress.current === undefined, - ) - ) { - continue; - } + let cursor = + params.initialCheckpoint !== ZERO_CHECKPOINT_STRING + ? params.initialCheckpoint + : getOmnichainCheckpoint({ tag: "start" })!; + + const to = min( + getOmnichainCheckpoint({ tag: "end" }), + getOmnichainCheckpoint({ tag: "finalized" }), + ); - // Calculate the mininum "current" checkpoint, limited by "finalized" and "end" - const to = min( - getOmnichainCheckpoint("end"), - getOmnichainCheckpoint("finalized"), - getOmnichainCheckpoint("current"), + const eventGenerators = Array.from(perChainSync.entries()).map( + ([chain, { syncProgress, historicalSync }]) => { + const sources = params.indexingBuild.sources.filter( + ({ filter }) => filter.chainId === chain.chain.id, ); - let consecutiveErrors = 0; - - /* - * Extract events with `syncStore.getEvents()`, paginating to - * avoid loading too many events into memory. - */ - while (true) { - if (isKilled) return; - if (from >= to) break; - const getEventsMaxBatchSize = args.common.options.syncEventsQuerySize; - - // convert `estimateSeconds` to checkpoint - const estimatedTo = encodeCheckpoint({ - ...zeroCheckpoint, - blockTimestamp: Math.min( - decodeCheckpoint(from).blockTimestamp + estimateSeconds, - maxCheckpoint.blockTimestamp, - ), - }); - - try { - const { events, cursor } = await args.syncStore.getEvents({ - filters: args.indexingBuild.sources.map(({ filter }) => filter), - from, - to: to < estimatedTo ? to : estimatedTo, - limit: getEventsMaxBatchSize, - }); - - args.common.logger.debug({ - service: "sync", - msg: `Fetched ${events.length} events from the database for a ${formatEta(estimateSeconds * 1000)} range from timestamp ${decodeCheckpoint(from).blockTimestamp}`, - }); - - for (const network of args.indexingBuild.networks) { - updateHistoricalStatus({ events, checkpoint: cursor, network }); - } - - estimateSeconds = estimate({ - from: decodeCheckpoint(from).blockTimestamp, - to: decodeCheckpoint(cursor).blockTimestamp, - target: getEventsMaxBatchSize, - result: events.length, - min: 10, - max: 86_400, - prev: estimateSeconds, - maxIncrease: 1.08, - }); + const localSyncGenerator = getLocalSyncGenerator({ + common: params.common, + chain, + syncProgress, + historicalSync, + }); - consecutiveErrors = 0; - yield { events, checkpoint: to }; - from = cursor; - } catch (error) { - // Handle errors by reducing the requested range by 10x - estimateSeconds = Math.max(10, Math.round(estimateSeconds / 10)); + const localEventGenerator = getLocalEventGenerator({ + common: params.common, + chain, + syncStore: params.syncStore, + sources, + localSyncGenerator, + from: + params.initialCheckpoint !== ZERO_CHECKPOINT_STRING + ? 
params.initialCheckpoint + : getChainCheckpoint({ syncProgress, chain, tag: "start" })!, + to, + limit: Math.round( + params.common.options.syncEventsQuerySize / + (params.indexingBuild.chains.length * 2), + ), + }); - args.common.logger.debug({ - service: "sync", - msg: `Failed to fetch events from the database, retrying with a ${formatEta(estimateSeconds * 1000)} range`, - }); + return bufferAsyncGenerator(localEventGenerator, 1); + }, + ); - if (++consecutiveErrors > 4) throw error; - } - } + const mergeAsync = + params.mode === "multichain" + ? mergeAsyncGenerators + : mergeAsyncGeneratorsWithEventOrder; + + for await (const { events, checkpoint } of mergeAsync(eventGenerators)) { + if (params.mode === "multichain") { + const chain = params.indexingBuild.chains.find( + (chain) => + chain.chain.id === Number(decodeCheckpoint(checkpoint).chainId), + )!; + params.common.logger.debug({ + service: "sync", + msg: `Sequenced ${events.length} '${chain.chain.name}' events for timestamps [${decodeCheckpoint(cursor).blockTimestamp}, ${decodeCheckpoint(checkpoint).blockTimestamp}]`, + }); + } else { + params.common.logger.debug({ + service: "sync", + msg: `Sequenced ${events.length} events for timestamps [${decodeCheckpoint(cursor).blockTimestamp}, ${decodeCheckpoint(checkpoint).blockTimestamp}]`, + }); } - /** `true` if all networks have synced all known finalized blocks. */ - const allHistoricalSyncExhaustive = Array.from( - perNetworkSync.values(), - ).every(({ syncProgress }) => { - if (isSyncEnd(syncProgress)) return true; - - // Determine if `finalized` block is considered "stale" - const staleSeconds = (Date.now() - latestFinalizedFetch) / 1_000; - if (staleSeconds <= args.common.options.syncHandoffStaleSeconds) { - return true; - } + for (const chain of params.indexingBuild.chains) { + updateHistoricalStatus({ events, checkpoint, chain }); + } + yield events; + cursor = checkpoint; + } + } - return false; - }); + const getOnRealtimeSyncEvent = () => { + const checkpoints = { + // Note: `checkpoints.current` not used in multichain mode + current: ZERO_CHECKPOINT_STRING, + finalized: ZERO_CHECKPOINT_STRING, + }; - if (allHistoricalSyncExhaustive) break; + // Note: `latencyTimers` not used in multichain mode + const latencyTimers = new Map number>(); - /** At least one network has a `finalized` block that is considered "stale". 
*/ + return ( + event: RealtimeSyncEvent, + { + chain, + syncProgress, + realtimeSync, + }: { + chain: Chain; + syncProgress: SyncProgress; + realtimeSync: RealtimeSync; + }, + ): void => { + switch (event.type) { + case "block": { + const events = buildEvents({ + sources: params.indexingBuild.sources, + chainId: chain.chain.id, + blockWithEventData: event, + finalizedChildAddresses: realtimeSync.finalizedChildAddresses, + unfinalizedChildAddresses: realtimeSync.unfinalizedChildAddresses, + }); - latestFinalizedFetch = Date.now(); + params.common.logger.debug({ + service: "sync", + msg: `Extracted ${events.length} '${chain.chain.name}' events for block ${hexToNumber(event.block.number)}`, + }); - await Promise.all( - Array.from(perNetworkSync.entries()).map( - async ([network, { requestQueue, syncProgress }]) => { - args.common.logger.debug({ - service: "sync", - msg: `Refetching '${network.name}' finalized block`, - }); + if (params.mode === "multichain") { + // Note: `checkpoints.current` not used in multichain mode + const checkpoint = getMultichainCheckpoint({ + tag: "current", + chain, + })!; - const latestBlock = await _eth_getBlockByNumber(requestQueue, { - blockTag: "latest", - }); + status[chain.chain.name]!.block = { + timestamp: hexToNumber(event.block.timestamp), + number: hexToNumber(event.block.number), + }; - const finalizedBlockNumber = Math.max( - 0, - hexToNumber(latestBlock.number) - network.finalityBlockCount, - ); + const readyEvents = events.concat(pendingEvents); + pendingEvents = []; + executedEvents = executedEvents.concat(readyEvents); - syncProgress.finalized = await _eth_getBlockByNumber(requestQueue, { - blockNumber: finalizedBlockNumber, + params.common.logger.debug({ + service: "sync", + msg: `Sequenced ${events.length} '${chain.chain.name}' events for block ${hexToNumber(event.block.number)}`, }); - const historicalLast = getHistoricalLast(syncProgress); + params + .onRealtimeEvent({ + type: "block", + checkpoint, + status: structuredClone(status), + events: readyEvents.sort((a, b) => + a.checkpoint < b.checkpoint ? -1 : 1, + ), + }) + .then(() => { + // update `ponder_realtime_latency` metric + if (event.endClock) { + params.common.metrics.ponder_realtime_latency.observe( + { network: chain.chain.name }, + event.endClock(), + ); + } + }); + } else { + const from = checkpoints.current; + checkpoints.current = getOmnichainCheckpoint({ tag: "current" })!; + const to = getOmnichainCheckpoint({ tag: "current" })!; + + if (event.endClock !== undefined) { + latencyTimers.set( + encodeCheckpoint( + blockToCheckpoint(event.block, chain.chain.id, "up"), + ), + event.endClock, + ); + } - // Set metric "ponder_historical_total_blocks" - args.common.metrics.ponder_historical_total_blocks.set( - { network: network.name }, - hexToNumber(historicalLast.number) - - hexToNumber(syncProgress.start.number) + - 1, - ); - }, - ), - ); - } - } + if (to > from) { + for (const chain of params.indexingBuild.chains) { + updateRealtimeStatus({ checkpoint: to, chain }); + } - /** - * Omnichain `onRealtimeSyncEvent` - * - * Handle callback events across all `args.networks`, and raising these - * events to `args.onRealtimeEvent` while maintaining checkpoint ordering. 
- */ - const onRealtimeSyncEvent = async ({ - network, - event, - }: { network: Network; event: RealtimeSyncEvent }) => { - const { syncProgress, realtimeSync, unfinalizedBlocks } = - perNetworkSync.get(network)!; + // Move ready events from pending to executed - switch (event.type) { - /** - * Handle a new block being ingested. - */ - case "block": { - // Update local sync, record checkpoint before and after - const from = getOmnichainCheckpoint("current")!; - syncProgress.current = event.block; - const to = getOmnichainCheckpoint("current")!; + const readyEvents = pendingEvents + .concat(events) + .filter(({ checkpoint }) => checkpoint < to); + pendingEvents = pendingEvents + .concat(events) + .filter(({ checkpoint }) => checkpoint > to); + executedEvents = executedEvents.concat(readyEvents); - // Update "ponder_sync_block" metric - args.common.metrics.ponder_sync_block.set( - { network: network.name }, - hexToNumber(syncProgress.current.number), - ); + params.common.logger.debug({ + service: "sync", + msg: `Sequenced ${events.length} '${chain.chain.name}' events for timestamps [${decodeCheckpoint(from).blockTimestamp}, ${decodeCheckpoint(to).blockTimestamp}]`, + }); - const newEvents = buildEvents({ - sources: args.indexingBuild.sources, - chainId: network.chainId, - blockWithEventData: event, - finalizedChildAddresses: realtimeSync.finalizedChildAddresses, - unfinalizedChildAddresses: realtimeSync.unfinalizedChildAddresses, - }); + params + .onRealtimeEvent({ + type: "block", + checkpoint: to, + status: structuredClone(status), + events: readyEvents.sort((a, b) => + a.checkpoint < b.checkpoint ? -1 : 1, + ), + }) + .then(() => { + // update `ponder_realtime_latency` metric + for (const [checkpoint, timer] of latencyTimers) { + if (checkpoint > from && checkpoint <= to) { + const chainId = Number( + decodeCheckpoint(checkpoint).chainId, + ); + const chain = params.indexingBuild.chains.find( + (chain) => chain.chain.id === chainId, + )!; + params.common.metrics.ponder_realtime_latency.observe( + { network: chain.chain.name }, + timer(), + ); + } + } + }); + } else { + pendingEvents = pendingEvents.concat(events); + } + } - unfinalizedBlocks.push(event); - pendingEvents.push(...newEvents); + break; + } - if (to > from) { - for (const network of args.indexingBuild.networks) { - updateRealtimeStatus({ checkpoint: to, network }); + case "finalize": { + const from = checkpoints.finalized; + checkpoints.finalized = getOmnichainCheckpoint({ tag: "finalized" })!; + const to = getOmnichainCheckpoint({ tag: "finalized" })!; + + if ( + params.mode === "omnichain" && + getChainCheckpoint({ syncProgress, chain, tag: "finalized" })! > + getOmnichainCheckpoint({ tag: "current" })! + ) { + params.common.logger.warn({ + service: "sync", + msg: `Finalized '${chain.chain.name}' block has surpassed overall indexing checkpoint`, + }); } - // Move events from pending to executed + // Remove all finalized data - const events = pendingEvents - .filter((event) => event.checkpoint < to) - .sort((a, b) => (a.checkpoint < b.checkpoint ? 
-1 : 1)); + executedEvents = executedEvents.filter((e) => e.checkpoint > to); - pendingEvents = pendingEvents.filter( - ({ checkpoint }) => checkpoint > to, - ); - executedEvents.push(...events); - - args - .onRealtimeEvent({ - type: "block", + // Raise event to parent function (runtime) + if (to > from) { + params.onRealtimeEvent({ + type: "finalize", checkpoint: to, - status: structuredClone(status), - events, - }) - .then(() => { - if (events.length > 0 && isKilled === false) { - args.common.logger.info({ - service: "app", - msg: `Indexed ${events.length} events`, - }); - } - - // update `ponder_realtime_latency` metric - for (const network of args.indexingBuild.networks) { - for (const { block, endClock } of perNetworkSync.get(network)! - .unfinalizedBlocks) { - const checkpoint = encodeCheckpoint( - blockToCheckpoint(block, network.chainId, "up"), - ); - if (checkpoint > from && checkpoint <= to && endClock) { - args.common.metrics.ponder_realtime_latency.observe( - { network: network.name }, - endClock(), - ); - } - } - } }); - } - - break; - } - /** - * Handle a new block being finalized. - */ - case "finalize": { - // Newly finalized range - const interval = [ - hexToNumber(syncProgress.finalized.number), - hexToNumber(event.block.number), - ] satisfies Interval; - - // Update local sync, record checkpoint before and after - const prev = getOmnichainCheckpoint("finalized")!; - syncProgress.finalized = event.block; - const checkpoint = getOmnichainCheckpoint("finalized")!; + } - if ( - getChainCheckpoint({ syncProgress, network, tag: "finalized" })! > - getOmnichainCheckpoint("current")! - ) { - args.common.logger.warn({ - service: "sync", - msg: `Finalized block for '${network.name}' has surpassed overall indexing checkpoint`, - }); + break; } - // Remove all finalized data - - const finalizedBlocks = unfinalizedBlocks.filter( - ({ block }) => - hexToNumber(block.number) <= hexToNumber(event.block.number), - ); - - perNetworkSync.get(network)!.unfinalizedBlocks = - unfinalizedBlocks.filter( - ({ block }) => - hexToNumber(block.number) > hexToNumber(event.block.number), - ); - - executedEvents = executedEvents.filter( - (e) => e.checkpoint > checkpoint, - ); - - // Add finalized blocks, logs, transactions, receipts, and traces to the sync-store. 
- - await Promise.all([ - args.syncStore.insertBlocks({ - blocks: finalizedBlocks - .filter(({ hasMatchedFilter }) => hasMatchedFilter) - .map(({ block }) => block), - chainId: network.chainId, - }), - args.syncStore.insertLogs({ - logs: finalizedBlocks.flatMap(({ logs, block }) => - logs.map((log) => ({ log, block })), - ), - shouldUpdateCheckpoint: true, - chainId: network.chainId, - }), - args.syncStore.insertLogs({ - logs: finalizedBlocks.flatMap(({ factoryLogs }) => - factoryLogs.map((log) => ({ log })), - ), - shouldUpdateCheckpoint: false, - chainId: network.chainId, - }), - args.syncStore.insertTransactions({ - transactions: finalizedBlocks.flatMap(({ transactions, block }) => - transactions.map((transaction) => ({ - transaction, - block, - })), - ), - chainId: network.chainId, - }), - args.syncStore.insertTransactionReceipts({ - transactionReceipts: finalizedBlocks.flatMap( - ({ transactionReceipts }) => transactionReceipts, - ), - chainId: network.chainId, - }), - args.syncStore.insertTraces({ - traces: finalizedBlocks.flatMap(({ traces, block, transactions }) => - traces.map((trace) => ({ - trace, - block, - transaction: transactions.find( - (t) => t.hash === trace.transactionHash, - )!, - })), - ), - chainId: network.chainId, - }), - ]); + case "reorg": { + // Remove all reorged data - // Add corresponding intervals to the sync-store - // Note: this should happen after so the database doesn't become corrupted - - if (network.disableCache === false) { - await args.syncStore.insertIntervals({ - intervals: args.indexingBuild.sources - .filter(({ filter }) => filter.chainId === network.chainId) - .map(({ filter }) => ({ filter, interval })), - chainId: network.chainId, - }); - } + let reorgedEvents = 0; - // Raise event to parent function (runtime) - if (checkpoint > prev) { - args.onRealtimeEvent({ type: "finalize", checkpoint }); - } + const isReorgedEvent = ({ chainId, block }: RawEvent) => { + if ( + chainId === chain.chain.id && + Number(block.number) > hexToNumber(event.block.number) + ) { + reorgedEvents++; + return true; + } + return false; + }; - /** - * The realtime service can be killed if `endBlock` is - * defined has become finalized. - */ - if (isSyncEnd(syncProgress)) { - args.common.metrics.ponder_sync_is_realtime.set( - { network: network.name }, - 0, + pendingEvents = pendingEvents.filter( + (e) => isReorgedEvent(e) === false, ); - args.common.metrics.ponder_sync_is_complete.set( - { network: network.name }, - 1, + executedEvents = executedEvents.filter( + (e) => isReorgedEvent(e) === false, ); - args.common.logger.info({ + + params.common.logger.debug({ service: "sync", - msg: `Synced final end block for '${network.name}' (${hexToNumber(syncProgress.end!.number)}), killing realtime sync service`, + msg: `Removed ${reorgedEvents} reorged '${chain.chain.name}' events`, }); - realtimeSync.kill(); - } - break; - } - /** - * Handle a reorg with a new common ancestor block being found. 
- */ - case "reorg": { - syncProgress.current = event.block; - // Note: this checkpoint is <= the previous checkpoint - const checkpoint = getOmnichainCheckpoint("current")!; - // Update "ponder_sync_block" metric - args.common.metrics.ponder_sync_block.set( - { network: network.name }, - hexToNumber(syncProgress.current.number), - ); + if (params.mode === "multichain") { + // Note: `checkpoints.current` not used in multichain mode + const checkpoint = getMultichainCheckpoint({ + tag: "current", + chain, + })!; - // Remove all reorged data + // Move events from executed to pending - perNetworkSync.get(network)!.unfinalizedBlocks = - unfinalizedBlocks.filter( - ({ block }) => - hexToNumber(block.number) <= hexToNumber(event.block.number), - ); + const events = executedEvents.filter( + (e) => e.checkpoint > checkpoint, + ); + executedEvents = executedEvents.filter( + (e) => e.checkpoint < checkpoint, + ); + pendingEvents = pendingEvents.concat(events); + + params.common.logger.debug({ + service: "sync", + msg: `Rescheduled ${events.length} reorged events`, + }); - const isReorgedEvent = ({ chainId, block }: RawEvent) => - chainId === network.chainId && - Number(block.number) > hexToNumber(event.block.number); + params.onRealtimeEvent({ type: "reorg", checkpoint }); + } else { + const from = checkpoints.current; + checkpoints.current = getOmnichainCheckpoint({ tag: "current" })!; + const to = getOmnichainCheckpoint({ tag: "current" })!; - pendingEvents = pendingEvents.filter( - (e) => isReorgedEvent(e) === false, - ); - executedEvents = executedEvents.filter( - (e) => isReorgedEvent(e) === false, - ); + // Move events from executed to pending - // Move events from executed to pending + const events = executedEvents.filter((e) => e.checkpoint > to); + executedEvents = executedEvents.filter((e) => e.checkpoint < to); + pendingEvents = pendingEvents.concat(events); - const events = executedEvents.filter((e) => e.checkpoint > checkpoint); - executedEvents = executedEvents.filter( - (e) => e.checkpoint < checkpoint, - ); - pendingEvents.push(...events); + params.common.logger.debug({ + service: "sync", + msg: `Rescheduled ${events.length} reorged events`, + }); - await args.syncStore.pruneRpcRequestResult({ - chainId: network.chainId, - blocks: event.reorgedBlocks, - }); + if (to < from) { + params.onRealtimeEvent({ + type: "reorg", + checkpoint: to, + }); + } + } - // Raise event to parent function (runtime) - args.onRealtimeEvent({ type: "reorg", checkpoint }); + break; + } - break; + default: + never(event); } - - default: - never(event); - } + }; }; + + /** Events that have been executed but not finalized. */ + let executedEvents: RawEvent[] = []; + /** Events that have not been executed. 
*/ + let pendingEvents: RawEvent[] = []; + + const onRealtimeSyncEvent = getOnRealtimeSyncEvent(); + + await Promise.all( + params.indexingBuild.chains.map(async (chain, index) => { + const rpc = params.rpcs[index]!; + + const sources = params.indexingBuild.sources.filter( + ({ filter }) => filter.chainId === chain.chain.id, + ); + + // Invalidate sync cache for devnet sources + if (chain.disableCache) { + params.common.logger.warn({ + service: "sync", + msg: `Deleting cache records for '${chain.chain.name}'`, + }); + + await params.syncStore.pruneByChain({ + chainId: chain.chain.id, + }); + } + + const historicalSync = await createHistoricalSync({ + common: params.common, + sources, + syncStore: params.syncStore, + rpc, + chain, + onFatalError: params.onFatalError, + }); + + const syncProgress = await getLocalSyncProgress({ + common: params.common, + chain, + sources, + rpc, + intervalsCache: historicalSync.intervalsCache, + }); + + const realtimeSync = createRealtimeSync({ + common: params.common, + sources, + rpc, + chain, + onEvent: (event) => + perChainOnRealtimeSyncEvent(event) + .then((event) => + onRealtimeSyncEvent(event, { + chain, + syncProgress, + realtimeSync, + }), + ) + .catch((error) => { + params.common.logger.error({ + service: "sync", + msg: `Fatal error: Unable to process ${event.type} event`, + error, + }); + params.onFatalError(error); + }), + onFatalError: params.onFatalError, + }); + + params.common.metrics.ponder_sync_is_realtime.set( + { network: chain.chain.name }, + 0, + ); + params.common.metrics.ponder_sync_is_complete.set( + { network: chain.chain.name }, + 0, + ); + + perChainSync.set(chain, { + syncProgress, + historicalSync, + realtimeSync, + }); + + const perChainOnRealtimeSyncEvent = getPerChainOnRealtimeSyncEvent({ + common: params.common, + chain, + sources, + syncStore: params.syncStore, + syncProgress, + realtimeSync, + }); + }), + ); + + const status: Status = {}; + const seconds: Seconds = {}; + + for (const chain of params.indexingBuild.chains) { + status[chain.chain.name] = { block: null, ready: false }; + } + + if (params.mode === "multichain") { + for (const chain of params.indexingBuild.chains) { + seconds[chain.chain.name] = { + start: decodeCheckpoint( + getMultichainCheckpoint({ tag: "start", chain })!, + ).blockTimestamp, + end: decodeCheckpoint( + min( + getOmnichainCheckpoint({ tag: "end" }), + getOmnichainCheckpoint({ tag: "finalized" }), + ), + ).blockTimestamp, + cached: decodeCheckpoint(params.initialCheckpoint).blockTimestamp, + }; + } + } else { + const start = decodeCheckpoint( + getOmnichainCheckpoint({ tag: "start" })!, + ).blockTimestamp; + const end = decodeCheckpoint( + min( + getOmnichainCheckpoint({ tag: "end" }), + getOmnichainCheckpoint({ tag: "finalized" }), + ), + ).blockTimestamp; + for (const chain of params.indexingBuild.chains) { + seconds[chain.chain.name] = { + start, + end, + cached: decodeCheckpoint(params.initialCheckpoint).blockTimestamp, + }; + } + } + return { getEvents, async startRealtime() { - for (const network of args.indexingBuild.networks) { - const { syncProgress, realtimeSync } = perNetworkSync.get(network)!; + for (const chain of params.indexingBuild.chains) { + const { syncProgress, realtimeSync } = perChainSync.get(chain)!; - const filters = args.indexingBuild.sources - .filter(({ filter }) => filter.chainId === network.chainId) + const filters = params.indexingBuild.sources + .filter(({ filter }) => filter.chainId === chain.chain.id) .map(({ filter }) => filter); - status[network.name]!.block = 
{ + status[chain.chain.id]!.block = { number: hexToNumber(syncProgress.current!.number), timestamp: hexToNumber(syncProgress.current!.timestamp), }; - status[network.name]!.ready = true; + status[chain.chain.id]!.ready = true; // Fetch any events between the omnichain finalized checkpoint and the single-chain // finalized checkpoint and add them to pendingEvents. These events are synced during // the historical phase, but must be indexed in the realtime phase because events // synced in realtime on other chains might be ordered before them. - const from = getOmnichainCheckpoint("finalized")!; + const from = getOmnichainCheckpoint({ tag: "finalized" })!; const finalized = getChainCheckpoint({ syncProgress, - network, + chain, tag: "finalized", })!; const end = getChainCheckpoint({ syncProgress, - network, + chain, tag: "end", })!; const to = min(finalized, end); if (to > from) { - const events = await args.syncStore.getEvents({ filters, from, to }); - pendingEvents.push(...events.events); + const events = await params.syncStore.getEvents({ + filters, + from, + to, + }); + + params.common.logger.debug({ + service: "sync", + msg: `Extracted and scheduled ${events.events.length} '${chain.chain.name}' events`, + }); + + pendingEvents = pendingEvents.concat(events.events); } if (isSyncEnd(syncProgress)) { - args.common.metrics.ponder_sync_is_complete.set( - { network: network.name }, + params.common.metrics.ponder_sync_is_complete.set( + { network: chain.chain.name }, 1, ); } else { - args.common.metrics.ponder_sync_is_realtime.set( - { network: network.name }, + params.common.metrics.ponder_sync_is_realtime.set( + { network: chain.chain.name }, 1, ); const initialChildAddresses = new Map>(); for (const filter of filters) { - if ("address" in filter && isAddressFactory(filter.address)) { - const addresses = await args.syncStore.getChildAddresses({ - filter: filter.address, - }); + switch (filter.type) { + case "log": + if (isAddressFactory(filter.address)) { + const addresses = await params.syncStore.getChildAddresses({ + filter: filter.address, + }); + + initialChildAddresses.set(filter.address, new Set(addresses)); + } + break; + + case "transaction": + case "transfer": + case "trace": + if (isAddressFactory(filter.fromAddress)) { + const addresses = await params.syncStore.getChildAddresses({ + filter: filter.fromAddress, + }); + + initialChildAddresses.set( + filter.fromAddress, + new Set(addresses), + ); + } + + if (isAddressFactory(filter.toAddress)) { + const addresses = await params.syncStore.getChildAddresses({ + filter: filter.toAddress, + }); + + initialChildAddresses.set( + filter.toAddress, + new Set(addresses), + ); + } - initialChildAddresses.set(filter.address, new Set(addresses)); + break; } } + params.common.logger.debug({ + service: "sync", + msg: `Initialized '${chain.chain.name}' realtime sync with ${initialChildAddresses.size} factory child addresses`, + }); + realtimeSync.start({ syncProgress, initialChildAddresses }); } } }, - getStartCheckpoint() { - return getOmnichainCheckpoint("start")!; - }, - getFinalizedCheckpoint() { - return getOmnichainCheckpoint("finalized")!; - }, getStatus() { return status; }, - getCachedTransport(network) { - const { requestQueue } = perNetworkSync.get(network)!; - return cachedTransport({ requestQueue, syncStore: args.syncStore }); + seconds, + getFinalizedCheckpoint() { + return getOmnichainCheckpoint({ tag: "finalized" })!; }, async kill() { - isKilled = true; const promises: Promise[] = []; - for (const network of 
args.indexingBuild.networks) { - const { historicalSync, realtimeSync, realtimeQueue } = - perNetworkSync.get(network)!; + for (const chain of params.indexingBuild.chains) { + const { historicalSync, realtimeSync } = perChainSync.get(chain)!; historicalSync.kill(); - realtimeQueue.pause(); - realtimeQueue.clear(); - promises.push(realtimeQueue.onIdle()); promises.push(realtimeSync.kill()); } await Promise.all(promises); @@ -997,182 +906,346 @@ export const createSync = async (args: CreateSyncParameters): Promise => { }; }; -/** ... */ -export const syncDiagnostic = async ({ +export const getPerChainOnRealtimeSyncEvent = ({ common, + chain, sources, - network, - requestQueue, + syncStore, + syncProgress, + realtimeSync, }: { common: Common; + chain: Chain; sources: Source[]; - network: Network; - requestQueue: RequestQueue; + syncStore: SyncStore; + syncProgress: SyncProgress; + realtimeSync: RealtimeSync; }) => { - /** Earliest `startBlock` among all `filters` */ - const start = Math.min(...sources.map(({ filter }) => filter.fromBlock ?? 0)); - /** - * Latest `endBlock` among all filters. `undefined` if at least one - * of the filters doesn't have an `endBlock`. - */ - const end = sources.some(({ filter }) => filter.toBlock === undefined) - ? undefined - : Math.max(...sources.map(({ filter }) => filter.toBlock!)); - - const [remoteChainId, startBlock, latestBlock] = await Promise.all([ - requestQueue.request({ method: "eth_chainId" }), - _eth_getBlockByNumber(requestQueue, { blockNumber: start }), - _eth_getBlockByNumber(requestQueue, { blockTag: "latest" }), - ]); - - const endBlock = - end === undefined - ? undefined - : end > hexToBigInt(latestBlock.number) - ? ({ - number: toHex(end), - hash: "0x", - parentHash: "0x", - timestamp: toHex(maxCheckpoint.blockTimestamp), - } as LightBlock) - : await _eth_getBlockByNumber(requestQueue, { blockNumber: end }); + let unfinalizedBlocks: Omit< + Extract, + "type" + >[] = []; - // Warn if the config has a different chainId than the remote. 
- if (hexToNumber(remoteChainId) !== network.chainId) { - common.logger.warn({ - service: "sync", - msg: `Remote chain ID (${remoteChainId}) does not match configured chain ID (${network.chainId}) for network "${network.name}"`, - }); - } + return async (event: RealtimeSyncEvent): Promise => { + switch (event.type) { + case "block": { + syncProgress.current = event.block; - const finalizedBlockNumber = Math.max( - 0, - hexToNumber(latestBlock.number) - network.finalityBlockCount, - ); + common.logger.debug({ + service: "sync", + msg: `Updated '${chain.chain.name}' current block to ${hexToNumber(event.block.number)}`, + }); - const finalizedBlock = await _eth_getBlockByNumber(requestQueue, { - blockNumber: finalizedBlockNumber, - }); + common.metrics.ponder_sync_block.set( + { network: chain.chain.name }, + hexToNumber(syncProgress.current.number), + ); - return { - start: startBlock, - end: endBlock, - finalized: finalizedBlock, + unfinalizedBlocks.push(event); + + return event; + } + + case "finalize": { + // Newly finalized range + const finalizedInterval = [ + hexToNumber(syncProgress.finalized.number), + hexToNumber(event.block.number), + ] satisfies Interval; + + syncProgress.finalized = event.block; + + common.logger.debug({ + service: "sync", + msg: `Updated '${chain.chain.name}' finalized block to ${hexToNumber(event.block.number)}`, + }); + + // Remove all finalized data + + const finalizedBlocks = unfinalizedBlocks.filter( + ({ block }) => + hexToNumber(block.number) <= hexToNumber(event.block.number), + ); + + unfinalizedBlocks = unfinalizedBlocks.filter( + ({ block }) => + hexToNumber(block.number) > hexToNumber(event.block.number), + ); + + // Add finalized blocks, logs, transactions, receipts, and traces to the sync-store. + + await Promise.all([ + syncStore.insertBlocks({ + blocks: finalizedBlocks + .filter(({ hasMatchedFilter }) => hasMatchedFilter) + .map(({ block }) => block), + chainId: chain.chain.id, + }), + syncStore.insertLogs({ + logs: finalizedBlocks.flatMap(({ logs, block }) => + logs.map((log) => ({ log, block })), + ), + shouldUpdateCheckpoint: true, + chainId: chain.chain.id, + }), + syncStore.insertLogs({ + logs: finalizedBlocks.flatMap(({ factoryLogs }) => + factoryLogs.map((log) => ({ log })), + ), + shouldUpdateCheckpoint: false, + chainId: chain.chain.id, + }), + syncStore.insertTransactions({ + transactions: finalizedBlocks.flatMap(({ transactions, block }) => + transactions.map((transaction) => ({ + transaction, + block, + })), + ), + chainId: chain.chain.id, + }), + syncStore.insertTransactionReceipts({ + transactionReceipts: finalizedBlocks.flatMap( + ({ transactionReceipts }) => transactionReceipts, + ), + chainId: chain.chain.id, + }), + syncStore.insertTraces({ + traces: finalizedBlocks.flatMap(({ traces, block, transactions }) => + traces.map((trace) => ({ + trace, + block, + transaction: transactions.find( + (t) => t.hash === trace.transactionHash, + )!, + })), + ), + chainId: chain.chain.id, + }), + ]); + + // Add corresponding intervals to the sync-store + // Note: this should happen after insertion so the database doesn't become corrupted + + if (chain.disableCache === false) { + const syncedIntervals: { + interval: Interval; + filter: Filter; + }[] = []; + + for (const { filter } of sources) { + const intervals = intervalIntersection( + [finalizedInterval], + [ + [ + filter.fromBlock ?? 0, + filter.toBlock ?? 
Number.POSITIVE_INFINITY, + ], + ], + ); + + for (const interval of intervals) { + syncedIntervals.push({ interval, filter }); + } + } + + await syncStore.insertIntervals({ + intervals: syncedIntervals, + chainId: chain.chain.id, + }); + } + + // The realtime service can be killed if `endBlock` is + // defined has become finalized. + + if (isSyncFinalized(syncProgress) && isSyncEnd(syncProgress)) { + common.metrics.ponder_sync_is_realtime.set( + { network: chain.chain.name }, + 0, + ); + common.metrics.ponder_sync_is_complete.set( + { network: chain.chain.name }, + 1, + ); + common.logger.info({ + service: "sync", + msg: `Killing '${chain.chain.name}' live indexing because the end block ${hexToNumber(syncProgress.end!.number)} has been finalized`, + }); + realtimeSync.kill(); + } + + return event; + } + + case "reorg": { + syncProgress.current = event.block; + + common.logger.debug({ + service: "sync", + msg: `Updated '${chain.chain.name}' current block to ${hexToNumber(event.block.number)}`, + }); + + common.metrics.ponder_sync_block.set( + { network: chain.chain.name }, + hexToNumber(syncProgress.current.number), + ); + + // Remove all reorged data + + unfinalizedBlocks = unfinalizedBlocks.filter( + ({ block }) => + hexToNumber(block.number) <= hexToNumber(event.block.number), + ); + + await syncStore.pruneRpcRequestResult({ + chainId: chain.chain.id, + blocks: event.reorgedBlocks, + }); + + return event; + } + } }; }; -/** Returns the closest-to-tip block that has been synced for all `sources`. */ -export const getCachedBlock = ({ - sources, - requestQueue, - historicalSync, -}: { +export async function* getLocalEventGenerator(params: { + common: Common; + chain: Chain; + syncStore: SyncStore; sources: Source[]; - requestQueue: RequestQueue; - historicalSync: HistoricalSync; -}): Promise | undefined => { - const latestCompletedBlocks = sources.map(({ filter }) => { - const requiredInterval = [ - filter.fromBlock ?? 0, - filter.toBlock ?? Number.POSITIVE_INFINITY, - ] satisfies Interval; - const fragmentIntervals = historicalSync.intervalsCache.get(filter)!; + localSyncGenerator: AsyncGenerator; + from: string; + to: string; + limit: number; +}): AsyncGenerator<{ events: RawEvent[]; checkpoint: string }> { + let cursor = params.from; + // Estimate optimal range (seconds) to query at a time, eventually + // used to determine `to` passed to `getEvents`. 
+ let estimateSeconds = 1_000; - const completedIntervals = sortIntervals( - intervalIntersection( - [requiredInterval], - intervalIntersectionMany( - fragmentIntervals.map(({ intervals }) => intervals), + params.common.logger.debug({ + service: "sync", + msg: `Initialized '${params.chain.chain.name}' extract query for timestamps [${decodeCheckpoint(params.from).blockTimestamp}, ${decodeCheckpoint(params.to).blockTimestamp}]`, + }); + + for await (const syncCheckpoint of bufferAsyncGenerator( + params.localSyncGenerator, + Number.POSITIVE_INFINITY, + )) { + let consecutiveErrors = 0; + while (cursor < min(syncCheckpoint, params.to)) { + const estimateCheckpoint = encodeCheckpoint({ + ...ZERO_CHECKPOINT, + chainId: BigInt(params.chain.chain.id), + blockTimestamp: Math.min( + decodeCheckpoint(cursor).blockTimestamp + estimateSeconds, + MAX_CHECKPOINT.blockTimestamp, ), - ), - ); + }); + const to = min(syncCheckpoint, estimateCheckpoint, params.to); + try { + const { events, cursor: queryCursor } = + await params.syncStore.getEvents({ + filters: params.sources.map(({ filter }) => filter), + from: cursor, + to, + limit: params.limit, + }); - if (completedIntervals.length === 0) return undefined; + params.common.logger.debug({ + service: "sync", + msg: `Extracted ${events.length} '${params.chain.chain.name}' events for timestamps [${decodeCheckpoint(cursor).blockTimestamp}, ${decodeCheckpoint(queryCursor).blockTimestamp}]`, + }); - const earliestCompletedInterval = completedIntervals[0]!; - if (earliestCompletedInterval[0] !== (filter.fromBlock ?? 0)) { - return undefined; - } - return earliestCompletedInterval[1]; - }); + estimateSeconds = estimate({ + from: decodeCheckpoint(cursor).blockTimestamp, + to: decodeCheckpoint(queryCursor).blockTimestamp, + target: params.limit, + result: events.length, + min: 10, + max: 86_400, + prev: estimateSeconds, + maxIncrease: 1.08, + }); - const minCompletedBlock = Math.min( - ...(latestCompletedBlocks.filter( - (block) => block !== undefined, - ) as number[]), - ); + params.common.logger.debug({ + service: "sync", + msg: `Updated '${params.chain.chain.name}' extract query estimate to ${estimateSeconds} seconds`, + }); - /** Filter i has known progress if a completed interval is found or if - * `_latestCompletedBlocks[i]` is undefined but `sources[i].filter.fromBlock` - * is > `_minCompletedBlock`. - */ - if ( - latestCompletedBlocks.every( - (block, i) => - block !== undefined || - (sources[i]!.filter.fromBlock ?? 
0) > minCompletedBlock, - ) - ) { - return _eth_getBlockByNumber(requestQueue, { - blockNumber: minCompletedBlock, - }); - } + consecutiveErrors = 0; + cursor = queryCursor; + yield { events, checkpoint: cursor }; + } catch (error) { + // Handle errors by reducing the requested range by 10x + estimateSeconds = Math.max(10, Math.round(estimateSeconds / 10)); - return undefined; -}; + params.common.logger.debug({ + service: "sync", + msg: `Updated '${params.chain.chain.name}' getEvents query estimate to ${estimateSeconds} seconds`, + }); -/** Predictive pagination and metrics for `historicalSync.sync()` */ -export async function* localHistoricalSyncGenerator({ + if (++consecutiveErrors > 4) throw error; + } + } + } +} + +export async function* getLocalSyncGenerator({ common, - network, + chain, syncProgress, historicalSync, - showLogs, }: { common: Common; - network: Network; + chain: Chain; syncProgress: SyncProgress; historicalSync: HistoricalSync; - showLogs: boolean; -}): AsyncGenerator { - // Return immediately if the `syncProgress.start` is unfinalized +}): AsyncGenerator { + const label = { network: chain.chain.name }; + + let cursor = hexToNumber(syncProgress.start.number); + const last = getHistoricalLast(syncProgress); + + // Estimate optimal range (blocks) to sync at a time, eventually to be used to + // determine `interval` passed to `historicalSync.sync()`. + let estimateRange = 25; + + // Handle two special cases: + // 1. `syncProgress.start` > `syncProgress.finalized` + // 2. `cached` is defined + if ( hexToNumber(syncProgress.start.number) > hexToNumber(syncProgress.finalized.number) ) { syncProgress.current = syncProgress.finalized; - // Update "ponder_sync_block" metric + common.logger.warn({ + service: "sync", + msg: `Skipped '${chain.chain.name}' historical sync because the start block is not finalized`, + }); + common.metrics.ponder_sync_block.set( - { network: network.name }, + label, hexToNumber(syncProgress.current.number), ); - - if (showLogs) { - common.logger.warn({ - service: "historical", - msg: `Skipped historical sync for '${network.name}' because the start block is not finalized`, - }); - } - - const label = { network: network.name }; - // Set "ponder_historical_total_blocks" common.metrics.ponder_historical_total_blocks.set(label, 0); - // Set "ponder_historical_sync_cached_blocks" common.metrics.ponder_historical_cached_blocks.set(label, 0); return; } - const historicalLast = getHistoricalLast(syncProgress); - - // Intialize metrics - const totalInterval = [ hexToNumber(syncProgress.start.number), - hexToNumber(historicalLast.number), + hexToNumber(last.number), ] satisfies Interval; + common.logger.debug({ + service: "sync", + msg: `Initialized '${chain.chain.name}' historical sync for blocks [${totalInterval[0]}, ${totalInterval[1]}]`, + }); + const requiredIntervals = Array.from( historicalSync.intervalsCache.entries(), ).flatMap(([filter, fragmentIntervals]) => @@ -1193,110 +1266,91 @@ export async function* localHistoricalSyncGenerator({ ); const required = intervalSum(intervalUnion(requiredIntervals)); - const total = totalInterval[1] - totalInterval[0] + 1; - const label = { network: network.name }; - // Set "ponder_historical_total_blocks" common.metrics.ponder_historical_total_blocks.set(label, total); - // Set "ponder_historical_sync_cached_blocks" common.metrics.ponder_historical_cached_blocks.set(label, total - required); - if (showLogs) { - common.logger.info({ - service: "historical", - msg: `Started syncing '${network.name}' with 
${formatPercentage( - (total - required) / total, - )} cached`, - }); - } + // Handle cache hit + if (syncProgress.current !== undefined) { + common.metrics.ponder_sync_block.set( + label, + hexToNumber(syncProgress.current.number), + ); - /** - * Estimate optimal range (blocks) to sync at a time, eventually to be used to - * determine `interval` passed to `historicalSync.sync()`. - */ - let estimateRange = 25; - // Cursor to track progress. - let fromBlock = hexToNumber(syncProgress.start.number); - - /** - * Handle a cache hit by fast forwarding and potentially exiting. - * A cache hit can either be: (listed by priority) - * 1) recovering progress from earlier invocations with different `finalized` blocks - * 2) recovering progress from the interval cache - */ - if ( - syncProgress.current !== undefined && - (syncProgress.cached === undefined || - hexToNumber(syncProgress.current.number) > - hexToNumber(syncProgress.cached.number)) - ) { - fromBlock = hexToNumber(syncProgress.current.number) + 1; - } else if (syncProgress.cached !== undefined) { // `getEvents` can make progress without calling `sync`, so immediately "yield" - yield; - - if ( - hexToNumber(syncProgress.cached.number) === - hexToNumber(historicalLast.number) - ) { - if (showLogs) { - common.logger.info({ - service: "historical", - msg: `Skipped historical sync for '${network.name}' because all blocks are cached.`, - }); - } + yield encodeCheckpoint( + blockToCheckpoint(syncProgress.current, chain.chain.id, "up"), + ); + + if (hexToNumber(syncProgress.current.number) === hexToNumber(last.number)) { + common.logger.info({ + service: "sync", + msg: `Skipped '${chain.chain.name}' historical sync because all blocks are cached.`, + }); return; + } else { + common.logger.info({ + service: "sync", + msg: `Started '${chain.chain.name}' historical sync with ${formatPercentage( + (total - required) / total, + )} cached`, + }); } - fromBlock = hexToNumber(syncProgress.cached.number) + 1; + cursor = hexToNumber(syncProgress.current.number) + 1; + } else { + common.logger.info({ + service: "historical", + msg: `Started '${chain.chain.name}' historical sync`, + }); } while (true) { - /** - * Select a range of blocks to sync bounded by `finalizedBlock`. - * - * It is important for devEx that the interval is not too large, because - * time spent syncing ≈ time before indexing function feedback. - */ + // Select a range of blocks to sync bounded by `finalizedBlock`. + // It is important for devEx that the interval is not too large, because + // time spent syncing ≈ time before indexing function feedback. + const interval: Interval = [ - Math.min(fromBlock, hexToNumber(historicalLast.number)), - Math.min(fromBlock + estimateRange, hexToNumber(historicalLast.number)), + Math.min(cursor, hexToNumber(last.number)), + Math.min(cursor + estimateRange, hexToNumber(last.number)), ]; const endClock = startClock(); - const syncBlock = await historicalSync.sync(interval); + const synced = await historicalSync.sync(interval); + + common.logger.debug({ + service: "sync", + msg: `Synced ${interval[1] - interval[0] + 1} '${chain.chain.name}' blocks [${interval[0]}, ${interval[1]}]`, + }); // Update cursor to record progress - fromBlock = interval[1] + 1; - - if (syncBlock === undefined) { - /** - * `syncBlock` will be undefined if a cache hit occur in `historicalSync.sync()`. - * If the all known blocks are synced, then update `syncProgress.current`, else - * progress to the next iteration. 
- */ - if (interval[1] === hexToNumber(historicalLast.number)) { - syncProgress.current = historicalLast; + cursor = interval[1] + 1; + + // `synced` will be undefined if a cache hit occur in `historicalSync.sync()`. + + if (synced === undefined) { + // If the all known blocks are synced, then update `syncProgress.current`, else + // progress to the next iteration. + if (interval[1] === hexToNumber(last.number)) { + syncProgress.current = last; } else { continue; } } else { - if (interval[1] === hexToNumber(historicalLast.number)) { - syncProgress.current = historicalLast; + if (interval[1] === hexToNumber(last.number)) { + syncProgress.current = last; } else { - syncProgress.current = syncBlock; + syncProgress.current = synced; } const duration = endClock(); - // Update "ponder_sync_block" metric common.metrics.ponder_sync_block.set( label, - hexToNumber(syncProgress.current.number), + hexToNumber(syncProgress.current!.number), ); - common.metrics.ponder_historical_duration.observe(label, duration); common.metrics.ponder_historical_completed_blocks.inc( label, @@ -1313,12 +1367,203 @@ export async function* localHistoricalSyncGenerator({ estimateRange * 2, 100_000, ); + + common.logger.debug({ + service: "sync", + msg: `Updated '${chain.chain.name}' historical sync estimate to ${estimateRange} blocks`, + }); } - yield; + yield encodeCheckpoint( + blockToCheckpoint(syncProgress.current!, chain.chain.id, "up"), + ); if (isSyncEnd(syncProgress) || isSyncFinalized(syncProgress)) { + common.logger.info({ + service: "sync", + msg: `Completed '${chain.chain.name}' historical sync`, + }); return; } } } + +export const getLocalSyncProgress = async ({ + common, + sources, + chain, + rpc, + intervalsCache, +}: { + common: Common; + sources: Source[]; + chain: Chain; + rpc: RPC; + intervalsCache: HistoricalSync["intervalsCache"]; +}): Promise => { + const syncProgress = {} as SyncProgress; + const filters = sources.map(({ filter }) => filter); + + // Earliest `fromBlock` among all `filters` + const start = Math.min(...filters.map((filter) => filter.fromBlock ?? 0)); + const cached = getCachedBlock({ filters, intervalsCache }); + + const diagnostics = await Promise.all( + cached === undefined + ? [ + rpc.request({ method: "eth_chainId" }), + _eth_getBlockByNumber(rpc, { blockTag: "latest" }), + _eth_getBlockByNumber(rpc, { blockNumber: start }), + ] + : [ + rpc.request({ method: "eth_chainId" }), + _eth_getBlockByNumber(rpc, { blockTag: "latest" }), + _eth_getBlockByNumber(rpc, { blockNumber: start }), + _eth_getBlockByNumber(rpc, { blockNumber: cached }), + ], + ); + + const finalized = Math.max( + 0, + hexToNumber(diagnostics[1].number) - chain.finalityBlockCount, + ); + syncProgress.finalized = await _eth_getBlockByNumber(rpc, { + blockNumber: finalized, + }); + syncProgress.start = diagnostics[2]; + if (diagnostics.length === 4) { + syncProgress.current = diagnostics[3]; + } + + // Warn if the config has a different chainId than the remote. 
+ if (hexToNumber(diagnostics[0]) !== chain.chain.id) { + common.logger.warn({ + service: "sync", + msg: `Remote chain ID (${diagnostics[0]}) does not match configured chain ID (${chain.chain.id}) for network "${chain.chain.name}"`, + }); + } + + if (filters.some((filter) => filter.toBlock === undefined)) { + return syncProgress; + } + + // Latest `toBlock` among all `filters` + const end = Math.max(...filters.map((filter) => filter.toBlock!)); + + if (end > hexToNumber(diagnostics[1].number)) { + syncProgress.end = { + number: toHex(end), + hash: "0x", + parentHash: "0x", + timestamp: toHex(MAX_CHECKPOINT.blockTimestamp), + } satisfies LightBlock; + } else { + syncProgress.end = await _eth_getBlockByNumber(rpc, { + blockNumber: end, + }); + } + + return syncProgress; +}; + +/** Returns the closest-to-tip block that has been synced for all `sources`. */ +export const getCachedBlock = ({ + filters, + intervalsCache, +}: { + filters: Filter[]; + intervalsCache: HistoricalSync["intervalsCache"]; +}): number | undefined => { + const latestCompletedBlocks = filters.map((filter) => { + const requiredInterval = [ + filter.fromBlock ?? 0, + filter.toBlock ?? Number.POSITIVE_INFINITY, + ] satisfies Interval; + const fragmentIntervals = intervalsCache.get(filter)!; + + const completedIntervals = sortIntervals( + intervalIntersection( + [requiredInterval], + intervalIntersectionMany( + fragmentIntervals.map(({ intervals }) => intervals), + ), + ), + ); + + if (completedIntervals.length === 0) return undefined; + + const earliestCompletedInterval = completedIntervals[0]!; + if (earliestCompletedInterval[0] !== (filter.fromBlock ?? 0)) { + return undefined; + } + return earliestCompletedInterval[1]; + }); + + const minCompletedBlock = Math.min( + ...(latestCompletedBlocks.filter( + (block) => block !== undefined, + ) as number[]), + ); + + // Filter i has known progress if a completed interval is found or if + // `_latestCompletedBlocks[i]` is undefined but `filters[i].fromBlock` + // is > `_minCompletedBlock`. + + if ( + latestCompletedBlocks.every( + (block, i) => + block !== undefined || (filters[i]!.fromBlock ?? 0) > minCompletedBlock, + ) + ) { + return minCompletedBlock; + } + + return undefined; +}; + +/** + * Merges multiple event generators into a single generator while preserving + * the order of events. + * + * @param generators - Generators to merge. + * @returns A single generator that yields events from all generators. + */ +export async function* mergeAsyncGeneratorsWithEventOrder( + generators: AsyncGenerator<{ events: RawEvent[]; checkpoint: string }>[], +): AsyncGenerator<{ events: RawEvent[]; checkpoint: string }> { + const results = await Promise.all(generators.map((gen) => gen.next())); + + while (results.some((res) => res.done !== true)) { + const supremum = min( + ...results.map((res) => (res.done ? undefined : res.value.checkpoint)), + ); + + const eventArrays: RawEvent[][] = []; + + for (const result of results) { + if (result.done === false) { + const [left, right] = partition( + result.value.events, + (event) => event.checkpoint <= supremum, + ); + + eventArrays.push(left); + result.value.events = right; + } + } + + const events = zipperMany(eventArrays).sort((a, b) => + a.checkpoint < b.checkpoint ? 
-1 : 1, + ); + + const index = results.findIndex( + (res) => res.done === false && res.value.checkpoint === supremum, + ); + + const resultPromise = generators[index]!.next(); + if (events.length > 0) { + yield { events, checkpoint: supremum }; + } + results[index] = await resultPromise; + } +} diff --git a/packages/core/src/sync/transport.test.ts b/packages/core/src/sync/transport.test.ts index aa7a7eb53..1f9ae7bad 100644 --- a/packages/core/src/sync/transport.test.ts +++ b/packages/core/src/sync/transport.test.ts @@ -6,8 +6,8 @@ import { setupIsolatedDatabase, } from "@/_test/setup.js"; import { deployErc20, mintErc20 } from "@/_test/simulate.js"; -import { anvil, getNetwork, publicClient } from "@/_test/utils.js"; -import { createRequestQueue } from "@/utils/requestQueue.js"; +import { anvil, getChain, publicClient } from "@/_test/utils.js"; +import { createRpc } from "@/rpc/index.js"; import { type Transport, parseEther } from "viem"; import { toHex } from "viem"; import { assertType, beforeEach, expect, test, vi } from "vitest"; @@ -18,16 +18,13 @@ beforeEach(setupAnvil); beforeEach(setupIsolatedDatabase); test("default", async (context) => { - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { syncStore, cleanup } = await setupDatabaseServices(context); const transport = cachedTransport({ - requestQueue, + rpc, syncStore, }); @@ -53,17 +50,14 @@ test("default", async (context) => { }); test("request() block dependent method", async (context) => { - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { syncStore, cleanup } = await setupDatabaseServices(context); const blockNumber = await publicClient.getBlockNumber(); const transport = cachedTransport({ - requestQueue, + rpc, syncStore, })({ chain: anvil, @@ -93,11 +87,8 @@ test("request() block dependent method", async (context) => { }); test("request() non-block dependent method", async (context) => { - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { address } = await deployErc20({ sender: ALICE }); await mintErc20({ @@ -112,7 +103,7 @@ test("request() non-block dependent method", async (context) => { const block = await publicClient.getBlock({ blockNumber: blockNumber }); const transport = cachedTransport({ - requestQueue, + rpc, syncStore, })({ chain: anvil, @@ -142,15 +133,12 @@ test("request() non-block dependent method", async (context) => { }); test("request() non-cached method", async (context) => { - const network = getNetwork(); - const requestQueue = createRequestQueue({ - network, - common: context.common, - }); + const chain = getChain(); + const rpc = createRpc({ common: context.common, chain }); const { syncStore, cleanup } = await setupDatabaseServices(context); const transport = cachedTransport({ - requestQueue, + rpc, syncStore, })({ chain: anvil, diff --git a/packages/core/src/sync/transport.ts b/packages/core/src/sync/transport.ts index ae903fb42..47d38a52e 100644 --- a/packages/core/src/sync/transport.ts +++ b/packages/core/src/sync/transport.ts @@ -1,7 +1,7 @@ +import type { RPC } from "@/rpc/index.js"; import type { SyncStore } from 
"@/sync-store/index.js"; import { toLowerCase } from "@/utils/lowercase.js"; import { orderObject } from "@/utils/order.js"; -import type { RequestQueue } from "@/utils/requestQueue.js"; import type { Hex, Transport } from "viem"; import { custom, hexToBigInt, maxUint256 } from "viem"; @@ -34,10 +34,10 @@ const nonBlockDependentMethods = new Set([ ]); export const cachedTransport = ({ - requestQueue, + rpc, syncStore, }: { - requestQueue: RequestQueue; + rpc: RPC; syncStore: SyncStore; }): Transport => { return ({ chain }) => { @@ -94,7 +94,7 @@ export const cachedTransport = ({ return cachedResult; } } else { - const response = await requestQueue.request(body); + const response = await rpc.request(body); await syncStore.insertRpcRequestResult({ ...cacheKey, result: JSON.stringify(response), @@ -102,7 +102,7 @@ export const cachedTransport = ({ return response; } } else { - return requestQueue.request(body); + return rpc.request(body); } }, }); diff --git a/packages/core/src/types/virtual.test-d.ts b/packages/core/src/types/virtual.test-d.ts index bfa28130a..dcab0b284 100644 --- a/packages/core/src/types/virtual.test-d.ts +++ b/packages/core/src/types/virtual.test-d.ts @@ -207,33 +207,15 @@ test("FormatEventName with blocks", () => { }); test("Context db", () => { - type a = Virtual.Context["db"]; + type a = Virtual.Context["db"]; // ^? assertType({} as any as Db); assertType>({} as any as a); }); -test("Context single network", () => { - type a = Virtual.Context< - typeof config, - typeof schema, - "c1:Event0" - >["network"]; - // ^? - - type expectedNetwork = { name: "mainnet"; chainId: 1 }; - - assertType({} as any as expectedNetwork); - assertType({} as any as a); -}); - -test("Context multi network", () => { - type a = Virtual.Context< - typeof config, - typeof schema, - "c2:Event1()" - >["network"]; +test("Context network", () => { + type a = Virtual.Context["network"]; // ^? type expectedNetwork = @@ -245,21 +227,19 @@ test("Context multi network", () => { }); test("Context block network", () => { - type a = Virtual.Context["network"]; + type a = Virtual.Context["network"]; // ^? - type expectedNetwork = { name: "mainnet"; chainId: 1 }; + type expectedNetwork = + | { name: "mainnet"; chainId: 1 } + | { name: "optimism"; chainId: 10 }; assertType({} as any as expectedNetwork); assertType({} as any as a); }); test("Context client", () => { - type a = Virtual.Context< - typeof config, - typeof schema, - "c2:Event1()" - >["client"]; + type a = Virtual.Context["client"]; // ^? type expectedFunctions = @@ -275,11 +255,7 @@ test("Context client", () => { }); test("Context contracts", () => { - type a = Virtual.Context< - typeof config, - typeof schema, - "c2:Event1()" - >["contracts"]["c2"]; + type a = Virtual.Context["contracts"]["c2"]; // ^? type expectedAbi = [Event1, Event1Overloaded, Func1, Func1Overloaded]; @@ -300,28 +276,6 @@ test("Context contracts", () => { assertType({} as any as a["address"]); }); -test("Context network without event", () => { - type a = Virtual.Context< - // ^? - typeof config, - typeof schema, - Virtual.EventNames - >["network"]; - - type expectedNetwork = - | { - name: "mainnet"; - chainId: 1; - } - | { - name: "optimism"; - chainId: 10; - }; - - assertType({} as any as expectedNetwork); - assertType({} as any as a); -}); - test("Event", () => { type a = Virtual.Event; // ^? 
diff --git a/packages/core/src/types/virtual.ts b/packages/core/src/types/virtual.ts index b2309428d..1e8e73581 100644 --- a/packages/core/src/types/virtual.ts +++ b/packages/core/src/types/virtual.ts @@ -166,22 +166,7 @@ export namespace Virtual { >[property], > = ([base] extends [never] ? undefined : base) | override; - export type Context< - config extends Config, - schema extends Schema, - name extends EventNames, - /// - sourceName extends ExtractSourceName = ExtractSourceName, - sourceNetwork = sourceName extends sourceName - ? - | (unknown extends config["contracts"][sourceName]["network"] - ? never - : config["contracts"][sourceName]["network"]) - | (unknown extends config["blocks"][sourceName]["network"] - ? never - : config["blocks"][sourceName]["network"]) - : never, - > = { + export type Context = { contracts: { [_contractName in keyof config["contracts"]]: { abi: config["contracts"][_contractName]["abi"]; @@ -199,20 +184,15 @@ export namespace Virtual { >; }; }; - network: sourceNetwork extends string - ? // 1. No network overriding - { - name: sourceNetwork; - chainId: config["networks"][sourceNetwork]["chainId"]; - } - : // 2. Network overrides - { - [key in keyof sourceNetwork]: { - name: key; - chainId: config["networks"][key & - keyof config["networks"]]["chainId"]; - }; - }[keyof sourceNetwork]; + network: keyof config["networks"] extends infer network extends + keyof config["networks"] + ? network extends network + ? { + name: network; + chainId: config["networks"][network]["chainId"]; + } + : never + : never; client: Prettify; db: Db; }; @@ -223,7 +203,7 @@ export namespace Virtual { name extends EventNames, > = { event: Event; - context: Context; + context: Context; }; export type Registry = { @@ -231,7 +211,7 @@ export namespace Virtual { _name: name, indexingFunction: ( args: { event: Event } & { - context: Prettify>; + context: Prettify>; }, ) => Promise | void, ) => void; diff --git a/packages/core/src/ui/app.tsx b/packages/core/src/ui/app.tsx index b152aff17..abb7891fb 100644 --- a/packages/core/src/ui/app.tsx +++ b/packages/core/src/ui/app.tsx @@ -26,9 +26,9 @@ export const buildUiState = (): UiState => { hasError: false, overall: { completedSeconds: 0, + cachedSeconds: 0, totalSeconds: 0, progress: 0, - completedToTimestamp: 0, totalEvents: 0, }, events: [], @@ -121,10 +121,8 @@ const App = (ui: UiState) => { ( {app.mode === "historical" ? ( historical - ) : app.mode === "realtime" ? ( - live ) : ( - complete + live )} ) @@ -132,13 +130,17 @@ const App = (ui: UiState) => { - + {" "} - {formatPercentage(app.progress)} + {formatPercentage(app.mode === "realtime" ? 1 : app.progress)} {app.eta === undefined || app.eta === 0 ? 
null - : ` (${formatEta(app.eta)} eta)`} + : ` (${formatEta(app.eta * 1_000)} eta)`} diff --git a/packages/core/src/utils/chains.test.ts b/packages/core/src/utils/chains.test.ts deleted file mode 100644 index 7de3eb2f0..000000000 --- a/packages/core/src/utils/chains.test.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { test } from "vitest"; - -import { chains } from "./chains.js"; - -test("test", () => { - // Will throw on error - Object.values(chains).map((c) => c.id); -}); diff --git a/packages/core/src/utils/chains.ts b/packages/core/src/utils/chains.ts deleted file mode 100644 index 640baa2b3..000000000 --- a/packages/core/src/utils/chains.ts +++ /dev/null @@ -1,3 +0,0 @@ -import * as _chains from "viem/chains"; - -export const chains = _chains as unknown as Record; diff --git a/packages/core/src/utils/checkpoint.test.ts b/packages/core/src/utils/checkpoint.test.ts index bf8baec79..f286514f4 100644 --- a/packages/core/src/utils/checkpoint.test.ts +++ b/packages/core/src/utils/checkpoint.test.ts @@ -2,13 +2,14 @@ import { expect, test } from "vitest"; import { type Checkpoint, + MAX_CHECKPOINT, + MAX_CHECKPOINT_STRING, checkpointMax, checkpointMin, decodeCheckpoint, encodeCheckpoint, isCheckpointEqual, isCheckpointGreaterThan, - maxCheckpoint, } from "./checkpoint.js"; test("encodeCheckpoint produces expected encoding", () => { @@ -60,10 +61,10 @@ test("decodeCheckpoint produces expected object", () => { }); test("decodeCheckpoint decodes an encoded maxCheckpoint", () => { - const encoded = encodeCheckpoint(maxCheckpoint); + const encoded = MAX_CHECKPOINT_STRING; const decoded = decodeCheckpoint(encoded); - expect(decoded).toMatchObject(maxCheckpoint); + expect(decoded).toMatchObject(MAX_CHECKPOINT); }); test("isCheckpointEqual returns true if checkpoints are the same", () => { diff --git a/packages/core/src/utils/checkpoint.ts b/packages/core/src/utils/checkpoint.ts index 3040e092e..e6e1060dc 100644 --- a/packages/core/src/utils/checkpoint.ts +++ b/packages/core/src/utils/checkpoint.ts @@ -105,7 +105,7 @@ export const decodeCheckpoint = (checkpoint: string): Checkpoint => { }; }; -export const zeroCheckpoint: Checkpoint = { +export const ZERO_CHECKPOINT: Checkpoint = { blockTimestamp: 0, chainId: 0n, blockNumber: 0n, @@ -114,7 +114,7 @@ export const zeroCheckpoint: Checkpoint = { eventIndex: 0n, }; -export const maxCheckpoint: Checkpoint = { +export const MAX_CHECKPOINT: Checkpoint = { blockTimestamp: 99999_99999, chainId: 9999_9999_9999_9999n, blockNumber: 9999_9999_9999_9999n, @@ -123,6 +123,10 @@ export const maxCheckpoint: Checkpoint = { eventIndex: 9999_9999_9999_9999n, }; +export const ZERO_CHECKPOINT_STRING = encodeCheckpoint(ZERO_CHECKPOINT); +export const MAX_CHECKPOINT_STRING = encodeCheckpoint(MAX_CHECKPOINT); + +/** /** * Returns true if two checkpoints are equal. */ @@ -154,4 +158,14 @@ export const checkpointMin = (...checkpoints: Checkpoint[]) => return isCheckpointGreaterThan(min, checkpoint) ? 
checkpoint : min; }); -export const LATEST = encodeCheckpoint(maxCheckpoint); +export const LATEST = MAX_CHECKPOINT_STRING; + +/** Compute the minimum checkpoint, filtering out undefined */ +export const min = (...checkpoints: (string | undefined)[]) => { + return checkpoints.reduce((acc, cur) => { + if (cur === undefined) return acc; + if (acc === undefined) return cur; + if (acc < cur) return acc; + return cur; + })!; +}; diff --git a/packages/core/src/utils/chunk.test.ts b/packages/core/src/utils/chunk.test.ts new file mode 100644 index 000000000..fe60f1585 --- /dev/null +++ b/packages/core/src/utils/chunk.test.ts @@ -0,0 +1,14 @@ +import { chunk } from "@/utils/chunk.js"; +import { expect, test } from "vitest"; + +test("chunk", () => { + let result = chunk([1, 2, 3, 4, 5, 6, 7, 8, 9], 3); + expect(result).toStrictEqual([ + [1, 2, 3], + [4, 5, 6], + [7, 8, 9], + ]); + + result = chunk([], 3); + expect(result).toStrictEqual([]); +}); diff --git a/packages/core/src/utils/chunk.ts b/packages/core/src/utils/chunk.ts new file mode 100644 index 000000000..f4de7c21f --- /dev/null +++ b/packages/core/src/utils/chunk.ts @@ -0,0 +1,7 @@ +export const chunk = (array: T[], size: number): T[][] => { + const chunks = []; + for (let i = 0; i < array.length; i += size) { + chunks.push(array.slice(i, i + size)); + } + return chunks; +}; diff --git a/packages/core/src/utils/finality.ts b/packages/core/src/utils/finality.ts new file mode 100644 index 000000000..0b6e77f76 --- /dev/null +++ b/packages/core/src/utils/finality.ts @@ -0,0 +1,40 @@ +import type { Chain } from "viem"; + +/** + * Returns the number of blocks that must pass before a block is considered final. + * Note that a value of `0` indicates that blocks are considered final immediately. + * + * @param network The network to get the finality block count for. + * @returns The finality block count. + */ +export function getFinalityBlockCount({ chain }: { chain: Chain }) { + let finalityBlockCount: number; + switch (chain.id) { + // Mainnet and mainnet testnets. + case 1: + case 3: + case 4: + case 5: + case 42: + case 11155111: + finalityBlockCount = 65; + break; + // Polygon. + case 137: + case 80001: + finalityBlockCount = 200; + break; + // Arbitrum. + case 42161: + case 42170: + case 421611: + case 421613: + finalityBlockCount = 240; + break; + default: + // Assume a 2-second block time, e.g. OP stack chains. 
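About the renamed checkpoint constants and the new `min` helper above: encoded checkpoints are built so that plain string comparison agrees with checkpoint ordering, which is why `min` can reduce with `<` directly over encoded values. A small sketch (the overridden field values are made up for illustration):

```ts
import {
  MAX_CHECKPOINT_STRING,
  ZERO_CHECKPOINT,
  ZERO_CHECKPOINT_STRING,
  encodeCheckpoint,
  min,
} from "@/utils/checkpoint.js";

// An arbitrary checkpoint between zero and max; only fields shown in the hunk are overridden.
const someCheckpoint = encodeCheckpoint({
  ...ZERO_CHECKPOINT,
  blockTimestamp: 1_700_000_000,
  chainId: 1n,
  blockNumber: 19_000_000n,
});

const isAfterZero = ZERO_CHECKPOINT_STRING < someCheckpoint; // true
const isBeforeMax = someCheckpoint < MAX_CHECKPOINT_STRING; // true

// `min` returns the smallest defined encoded checkpoint, skipping undefined entries.
const earliest = min(undefined, someCheckpoint, MAX_CHECKPOINT_STRING); // === someCheckpoint
```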
+ finalityBlockCount = 30; + } + + return finalityBlockCount; +} diff --git a/packages/core/src/utils/generators.test.ts b/packages/core/src/utils/generators.test.ts index a0db1a79d..c46206109 100644 --- a/packages/core/src/utils/generators.test.ts +++ b/packages/core/src/utils/generators.test.ts @@ -1,8 +1,8 @@ import { promiseWithResolvers } from "@ponder/common"; import { expect, test } from "vitest"; -import { mergeAsyncGenerators } from "./generators.js"; +import { bufferAsyncGenerator, mergeAsyncGenerators } from "./generators.js"; -test("mergeAsyncGenerators", async () => { +test("mergeAsyncGenerators()", async () => { const p1 = promiseWithResolvers(); const p2 = promiseWithResolvers(); const p3 = promiseWithResolvers(); @@ -37,7 +37,7 @@ test("mergeAsyncGenerators", async () => { expect(results).toStrictEqual([1, 2, 3, 4]); }); -test("mergeAsyncGenerators results", async () => { +test("mergeAsyncGenerators() yields all results", async () => { const p1 = promiseWithResolvers(); const p2 = promiseWithResolvers(); const p3 = promiseWithResolvers(); @@ -78,3 +78,82 @@ test("mergeAsyncGenerators results", async () => { expect(results).toStrictEqual([1, 2, 3, 4]); }); + +test("bufferAsyncGenerator() prefetches results", async () => { + let sum = 0; + + async function* inputGenerator() { + yield; + sum += 1; + yield; + sum += 1; + yield; + sum += 1; + yield; + sum += 1; + } + + const generator = bufferAsyncGenerator(inputGenerator(), 2); + + let result = await generator.next(); + expect(result.done).toBe(false); + expect(sum).toBe(2); + + result = await generator.next(); + expect(result.done).toBe(false); + expect(sum).toBe(3); + + result = await generator.next(); + expect(result.done).toBe(false); + expect(sum).toBe(4); + + result = await generator.next(); + expect(result.done).toBe(false); + expect(sum).toBe(4); + + result = await generator.next(); + expect(result.done).toBe(true); +}); + +test("bufferAsyncGenerator() yields all results", async () => { + const p1 = promiseWithResolvers(); + const p2 = promiseWithResolvers(); + const p3 = promiseWithResolvers(); + const p4 = promiseWithResolvers(); + + async function* inputGenerator() { + yield await p1.promise; + yield await p2.promise; + yield await p3.promise; + yield await p4.promise; + } + + const generator = bufferAsyncGenerator(inputGenerator(), 2); + + let resultPromise = generator.next(); + p1.resolve(1); + let result = await resultPromise; + expect(result.done).toBe(false); + expect(result.value).toBe(1); + + resultPromise = generator.next(); + p2.resolve(2); + result = await resultPromise; + expect(result.done).toBe(false); + expect(result.value).toBe(2); + + resultPromise = generator.next(); + p3.resolve(3); + result = await resultPromise; + expect(result.done).toBe(false); + expect(result.value).toBe(3); + + resultPromise = generator.next(); + p4.resolve(4); + result = await resultPromise; + expect(result.done).toBe(false); + expect(result.value).toBe(4); + + result = await generator.next(); + expect(result.done).toBe(true); +}); diff --git a/packages/core/src/utils/generators.ts b/packages/core/src/utils/generators.ts index dd4ee6145..9b3f95bb7 100644 --- a/packages/core/src/utils/generators.ts +++ b/packages/core/src/utils/generators.ts @@ -1,5 +1,11 @@ import { promiseWithResolvers } from "@ponder/common"; +/** + * Merges multiple async generators into a single async generator. + * + * @param generators - The generators to merge. + * @returns A single async generator that yields results from all input generators. 
+ */ export async function* mergeAsyncGenerators( generators: AsyncGenerator[], ): AsyncGenerator { @@ -25,3 +31,63 @@ export async function* mergeAsyncGenerators( } } } + +/** + * Buffers the results of an async generator. + * + * @param generator - The generator to buffer. + * @param size - The size of the buffer. + * @returns An async generator that yields results from the input generator. + */ +export async function* bufferAsyncGenerator( + generator: AsyncGenerator, + size: number, +): AsyncGenerator { + const buffer: T[] = []; + let done = false; + + let pwr1 = promiseWithResolvers(); + let pwr2 = promiseWithResolvers(); + + (async () => { + for await (const result of generator) { + buffer.push(result); + + pwr1.resolve(); + + if (buffer.length > size) await pwr2.promise; + pwr2 = promiseWithResolvers(); + } + done = true; + pwr1.resolve(); + })(); + + while (done === false || buffer.length > 0) { + if (buffer.length > 0) { + pwr2.resolve(); + + yield buffer.shift()!; + } else { + await pwr1.promise; + pwr1 = promiseWithResolvers(); + } + } +} + +/** + * Drains an async generator into an array. + * + * @param asyncGenerator - The async generator to drain. + * @returns An array of results from the input generator. + */ +export async function drainAsyncGenerator( + asyncGenerator: AsyncGenerator, +): Promise { + const result: T[] = []; + + for await (const events of asyncGenerator) { + result.push(events); + } + + return result; +} diff --git a/packages/core/src/utils/partition.test.ts b/packages/core/src/utils/partition.test.ts new file mode 100644 index 000000000..d7526f6fd --- /dev/null +++ b/packages/core/src/utils/partition.test.ts @@ -0,0 +1,16 @@ +import { partition } from "@/utils/partition.js"; +import { expect, test } from "vitest"; + +test("partition", () => { + let [left, right] = partition([1, 2, 3, 4, 5], (n) => n <= 2); + expect(left).toStrictEqual([1, 2]); + expect(right).toStrictEqual([3, 4, 5]); + + [left, right] = partition([1, 2, 3, 4, 5], (n) => n < 6); + expect(left).toStrictEqual([1, 2, 3, 4, 5]); + expect(right).toStrictEqual([]); + + [left, right] = partition([2, 5], (n) => n <= 5); + expect(left).toStrictEqual([2, 5]); + expect(right).toStrictEqual([]); +}); diff --git a/packages/core/src/utils/partition.ts b/packages/core/src/utils/partition.ts new file mode 100644 index 000000000..ab4701901 --- /dev/null +++ b/packages/core/src/utils/partition.ts @@ -0,0 +1,41 @@ +/** + * Divides an array into two arrays, where the first array + * contains all elements that satisfy the predicate, and the + * second array contains all elements that do not satisfy the + * predicate. + * + * Note: It is assumed that the array is sorted. + * + * @param array - The array to partition. + * @param predicate - The predicate to partition the array by. + * + * @returns A tuple containing the left and right arrays. 
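The new generator helpers compose naturally: `bufferAsyncGenerator` lets a producer run up to `size` results ahead of the consumer, and `drainAsyncGenerator` collects a generator into an array. A usage sketch (the `fetchBlock` producer is hypothetical):

```ts
import { bufferAsyncGenerator, drainAsyncGenerator } from "@/utils/generators.js";

// Hypothetical slow producer, e.g. one RPC round trip per block.
declare function fetchBlock(n: number): Promise<{ number: number }>;

async function* blocks(from: number, to: number) {
  for (let n = from; n <= to; n++) {
    yield await fetchBlock(n);
  }
}

export async function collectBlocks() {
  // Keep up to 4 blocks in flight ahead of the consumer.
  const buffered = bufferAsyncGenerator(blocks(0, 9), 4);

  // Drain into an array (handy in tests and one-shot scripts).
  return drainAsyncGenerator(buffered);
}
```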
+ * + * @example + * ```ts + * const [left, right] = partition([1, 2, 3, 4, 5], (n) => n <= 2); + * // left = [1, 2] + * // right = [3, 4, 5] + * ``` + */ +export const partition = ( + array: T[], + predicate: (item: T) => boolean, +): [T[], T[]] => { + let low = 0; + let high = array.length; + + while (low < high) { + const mid = Math.floor((low + high) / 2); + if (predicate(array[mid]!)) { + low = mid + 1; + } else { + high = mid; + } + } + + const left = array.slice(0, low); + const right = array.slice(low); + + return [left, right]; +}; diff --git a/packages/core/src/utils/requestQueue.test.ts b/packages/core/src/utils/requestQueue.test.ts deleted file mode 100644 index 317e11715..000000000 --- a/packages/core/src/utils/requestQueue.test.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { setupAnvil, setupCommon } from "@/_test/setup.js"; -import { getNetwork } from "@/_test/utils.js"; -import type { Common } from "@/internal/common.js"; -import type { Network } from "@/internal/types.js"; -import { beforeEach, expect, test } from "vitest"; -import { createRequestQueue } from "./requestQueue.js"; - -beforeEach(setupCommon); -beforeEach(setupAnvil); - -/** Creates a request queue with a `maxRequestsPerSecond` of 1. */ -const getQueue = (network: Network, common: Common) => { - return createRequestQueue({ - network: { ...network, maxRequestsPerSecond: 1 }, - common, - }); -}; - -test("requests", async ({ common }) => { - const network = getNetwork(); - - const queue = getQueue(network, common); - queue.start(); - - const chainId = await queue.request({ method: "eth_chainId" }); - - expect(chainId).toBe("0x1"); -}); diff --git a/packages/core/src/utils/requestQueue.ts b/packages/core/src/utils/requestQueue.ts deleted file mode 100644 index a5a40f913..000000000 --- a/packages/core/src/utils/requestQueue.ts +++ /dev/null @@ -1,178 +0,0 @@ -import type { Common } from "@/internal/common.js"; -import type { Network } from "@/internal/types.js"; -import { type Queue, createQueue } from "@ponder/common"; -import { - type GetLogsRetryHelperParameters, - getLogsRetryHelper, -} from "@ponder/utils"; -import { - type EIP1193Parameters, - HttpRequestError, - JsonRpcVersionUnsupportedError, - MethodNotFoundRpcError, - MethodNotSupportedRpcError, - ParseRpcError, - type PublicRpcSchema, - type RpcError, - isHex, -} from "viem"; -import type { DebugRpcSchema } from "./debug.js"; -import { startClock } from "./timer.js"; -import { wait } from "./wait.js"; - -type Schema = [...PublicRpcSchema, ...DebugRpcSchema]; - -type RequestReturnType["method"]> = - Extract["ReturnType"]; - -export type RequestQueue = Omit< - Queue< - RequestReturnType["method"]>, - EIP1193Parameters - >, - "add" -> & { - request: >( - parameters: TParameters, - ) => Promise>; -}; - -const RETRY_COUNT = 9; -const BASE_DURATION = 125; - -/** - * Creates a queue built to manage rpc requests. 
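One thing worth spelling out about `partition` above: it finds the split point by binary search rather than by filtering, so it only returns a correct result when the input is already ordered with respect to the predicate (every element that satisfies it precedes every element that does not), as the doc comment notes. A usage sketch in that spirit (the finalized-block framing is illustrative):

```ts
import { partition } from "@/utils/partition.js";

// A sorted list of block numbers, split at a finalized-block cutoff.
const blockNumbers = [100, 200, 300, 400, 500];
const finalizedBlock = 350;

const [finalized, unfinalized] = partition(blockNumbers, (n) => n <= finalizedBlock);
// finalized   = [100, 200, 300]
// unfinalized = [400, 500]
```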
- */ -export const createRequestQueue = ({ - network, - common, -}: { - network: Network; - common: Common; -}): RequestQueue => { - // @ts-ignore - const fetchRequest = async (request: EIP1193Parameters) => { - for (let i = 0; i <= RETRY_COUNT; i++) { - try { - const stopClock = startClock(); - common.logger.trace({ - service: "rpc", - msg: `Sent ${request.method} request (params=${JSON.stringify(request.params)})`, - }); - const response = await network.transport.request(request); - common.logger.trace({ - service: "rpc", - msg: `Received ${request.method} response (duration=${stopClock()}, params=${JSON.stringify(request.params)})`, - }); - common.metrics.ponder_rpc_request_duration.observe( - { method: request.method, network: network.name }, - stopClock(), - ); - - return response; - } catch (_error) { - const error = _error as Error; - - if ( - request.method === "eth_getLogs" && - isHex(request.params[0].fromBlock) && - isHex(request.params[0].toBlock) - ) { - const getLogsErrorResponse = getLogsRetryHelper({ - params: request.params as GetLogsRetryHelperParameters["params"], - error: error as RpcError, - }); - - if (getLogsErrorResponse.shouldRetry === true) throw error; - } - - if (shouldRetry(error) === false) { - common.logger.warn({ - service: "rpc", - msg: `Failed ${request.method} request`, - }); - throw error; - } - - if (i === RETRY_COUNT) { - common.logger.warn({ - service: "rpc", - msg: `Failed ${request.method} request after ${i + 1} attempts`, - error, - }); - throw error; - } - - const duration = BASE_DURATION * 2 ** i; - common.logger.debug({ - service: "rpc", - msg: `Failed ${request.method} request, retrying after ${duration} milliseconds`, - error, - }); - await wait(duration); - } - } - }; - - const requestQueue: Queue< - unknown, - { - request: EIP1193Parameters; - stopClockLag: () => number; - } - > = createQueue({ - frequency: network.maxRequestsPerSecond, - concurrency: Math.ceil(network.maxRequestsPerSecond / 4), - initialStart: true, - browser: false, - worker: async (task: { - request: EIP1193Parameters; - stopClockLag: () => number; - }) => { - common.metrics.ponder_rpc_request_lag.observe( - { method: task.request.method, network: network.name }, - task.stopClockLag(), - ); - - return await fetchRequest(task.request); - }, - }); - - return { - ...requestQueue, - request: >( - params: TParameters, - ) => { - const stopClockLag = startClock(); - - return requestQueue.add({ request: params, stopClockLag }); - }, - } as RequestQueue; -}; - -/** - * @link https://github.com/wevm/viem/blob/main/src/utils/buildRequest.ts#L192 - */ -function shouldRetry(error: Error) { - if ("code" in error && typeof error.code === "number") { - // Invalid JSON - if (error.code === ParseRpcError.code) return false; - // Method does not exist - if (error.code === MethodNotFoundRpcError.code) return false; - // Method is not implemented - if (error.code === MethodNotSupportedRpcError.code) return false; - // Version of JSON-RPC protocol is not supported - if (error.code === JsonRpcVersionUnsupportedError.code) return false; - } - if (error instanceof HttpRequestError && error.status) { - // Method Not Allowed - if (error.status === 405) return false; - // Not Found - if (error.status === 404) return false; - // Not Implemented - if (error.status === 501) return false; - // HTTP Version Not Supported - if (error.status === 505) return false; - } - return true; -} diff --git a/packages/core/src/utils/rpc.ts b/packages/core/src/utils/rpc.ts index 5d3d31cec..73e760c08 100644 --- 
a/packages/core/src/utils/rpc.ts +++ b/packages/core/src/utils/rpc.ts @@ -1,3 +1,4 @@ +import type { RPC } from "@/rpc/index.js"; import type { SyncBlock, SyncLog, @@ -5,7 +6,6 @@ import type { SyncTransactionReceipt, } from "@/types/sync.js"; import { toLowerCase } from "@/utils/lowercase.js"; -import type { RequestQueue } from "@/utils/requestQueue.js"; import { type Address, BlockNotFoundError, @@ -20,7 +20,7 @@ import { * Helper function for "eth_getBlockByNumber" request. */ export const _eth_getBlockByNumber = ( - requestQueue: RequestQueue, + rpc: RPC, { blockNumber, blockTag, @@ -28,7 +28,7 @@ export const _eth_getBlockByNumber = ( | { blockNumber: Hex | number; blockTag?: never } | { blockNumber?: never; blockTag: "latest" }, ): Promise => - requestQueue + rpc .request({ method: "eth_getBlockByNumber", params: [ @@ -50,10 +50,10 @@ export const _eth_getBlockByNumber = ( * Helper function for "eth_getBlockByNumber" request. */ export const _eth_getBlockByHash = ( - requestQueue: RequestQueue, + rpc: RPC, { hash }: { hash: Hex }, ): Promise => - requestQueue + rpc .request({ method: "eth_getBlockByHash", params: [hash, true], @@ -71,7 +71,7 @@ export const _eth_getBlockByHash = ( * Handles different error types and retries the request if applicable. */ export const _eth_getLogs = async ( - requestQueue: RequestQueue, + rpc: RPC, params: { address?: Address | Address[]; topics?: LogTopic[]; @@ -81,7 +81,7 @@ export const _eth_getLogs = async ( ), ): Promise => { if ("blockHash" in params) { - return requestQueue + return rpc .request({ method: "eth_getLogs", params: [ @@ -100,7 +100,7 @@ export const _eth_getLogs = async ( .then((l) => l as SyncLog[]); } - return requestQueue + return rpc .request({ method: "eth_getLogs", params: [ @@ -130,10 +130,10 @@ export const _eth_getLogs = async ( * Helper function for "eth_getTransactionReceipt" request. */ export const _eth_getTransactionReceipt = ( - requestQueue: RequestQueue, + rpc: RPC, { hash }: { hash: Hex }, ): Promise => - requestQueue + rpc .request({ method: "eth_getTransactionReceipt", params: [hash], @@ -150,10 +150,10 @@ export const _eth_getTransactionReceipt = ( * Helper function for "eth_getBlockReceipts" request. */ export const _eth_getBlockReceipts = ( - requestQueue: RequestQueue, + rpc: RPC, { blockHash }: { blockHash: Hash }, ): Promise => - requestQueue + rpc .request({ method: "eth_getBlockReceipts", params: [blockHash], @@ -164,14 +164,14 @@ export const _eth_getBlockReceipts = ( * Helper function for "debug_traceBlockByNumber" request. */ export const _debug_traceBlockByNumber = ( - requestQueue: RequestQueue, + rpc: RPC, { blockNumber, }: { blockNumber: Hex | number; }, ): Promise => - requestQueue + rpc .request({ method: "debug_traceBlockByNumber", params: [ @@ -238,14 +238,14 @@ export const _debug_traceBlockByNumber = ( * Helper function for "debug_traceBlockByHash" request. 
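To make the mechanical `requestQueue` to `rpc` rename in these helpers concrete: call sites now pass the `RPC` object as the first argument, and the call shape is otherwise unchanged. A hedged sketch, assuming `rpc` is constructed by the new `@/rpc/index.js` module (not part of this diff) and assuming the usual `fromBlock`/`toBlock` range parameters elided in the hunk above:

```ts
import type { RPC } from "@/rpc/index.js";
import { _eth_getBlockByNumber, _eth_getLogs } from "@/utils/rpc.js";

// Assumed to be constructed elsewhere by the new rpc module.
declare const rpc: RPC;

export async function fetchRecentLogs(address: `0x${string}`) {
  const latest = await _eth_getBlockByNumber(rpc, { blockTag: "latest" });

  // Same call shape as before, with `rpc` in place of the old request queue.
  return _eth_getLogs(rpc, {
    address,
    fromBlock: "0x0",
    toBlock: latest.number,
  });
}
```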
*/ export const _debug_traceBlockByHash = ( - requestQueue: RequestQueue, + rpc: RPC, { hash, }: { hash: Hash; }, ): Promise => - requestQueue + rpc .request({ method: "debug_traceBlockByHash", params: [hash, { tracer: "callTracer" }], diff --git a/packages/core/src/utils/rpcUrl.test.ts b/packages/core/src/utils/rpcUrl.test.ts new file mode 100644 index 000000000..8a15220f3 --- /dev/null +++ b/packages/core/src/utils/rpcUrl.test.ts @@ -0,0 +1,21 @@ +import { mainnet } from "viem/chains"; +import { expect, test } from "vitest"; +import { isRpcUrlPublic } from "./rpcUrl.js"; + +test("isPublicRpcUrl returns true for Cloudflare public RPC URL", () => { + const isPublic = isRpcUrlPublic({ + chain: mainnet, + rpcUrl: "https://cloudflare-eth.com", + }); + + expect(isPublic).toBe(true); +}); + +test("isPublicRpcUrl returns false for Alchemy RPC URL", () => { + const isPublic = isRpcUrlPublic({ + chain: mainnet, + rpcUrl: "https://eth-mainnet.g.alchemy.com/v2/abc", + }); + + expect(isPublic).toBe(false); +}); diff --git a/packages/core/src/utils/rpcUrl.ts b/packages/core/src/utils/rpcUrl.ts new file mode 100644 index 000000000..cf884ed72 --- /dev/null +++ b/packages/core/src/utils/rpcUrl.ts @@ -0,0 +1,25 @@ +import type { Chain } from "viem"; + +/** + * Returns `true` if the RPC URL is found in the list of public RPC URLs + * included in viem/chains. Handles both HTTP and WebSocket RPC URLs. + * + * @returns Boolean indicating if the RPC URL is public. + */ +export function isRpcUrlPublic({ + chain, + rpcUrl, +}: { + chain: Chain; + rpcUrl: string; +}) { + for (const http of chain.rpcUrls.default.http) { + if (http === rpcUrl) return true; + } + + for (const webSocket of chain.rpcUrls.default.webSocket ?? []) { + if (webSocket === rpcUrl) return true; + } + + return false; +} diff --git a/packages/core/src/utils/zipper.test.ts b/packages/core/src/utils/zipper.test.ts new file mode 100644 index 000000000..9782f0240 --- /dev/null +++ b/packages/core/src/utils/zipper.test.ts @@ -0,0 +1,15 @@ +import { zipper, zipperMany } from "@/utils/zipper.js"; +import { expect, test } from "vitest"; +test("zipper", () => { + const result = zipper([1, 3, 5], [2, 4, 6]); + expect(result).toStrictEqual([1, 2, 3, 4, 5, 6]); +}); + +test("zipperMany", () => { + const result = zipperMany([ + [1, 3, 5], + [2, 4, 6], + [7, 8, 9], + ]); + expect(result).toStrictEqual([1, 2, 3, 4, 5, 6, 7, 8, 9]); +}); diff --git a/packages/core/src/utils/zipper.ts b/packages/core/src/utils/zipper.ts new file mode 100644 index 000000000..1bb065ad8 --- /dev/null +++ b/packages/core/src/utils/zipper.ts @@ -0,0 +1,80 @@ +/** + * Merges two sorted arrays into a single sorted array. + * + * @param array1 - The first array to merge. + * @param array2 - The second array to merge. + * @param compare - The comparison function to use. + * + * @returns The merged array. + * + * @example + * ```ts + * const result = zipper([1, 3, 5], [2, 4, 6]); + * // result = [1, 2, 3, 4, 5, 6] + * ``` + */ +export const zipper = ( + array1: T[], + array2: T[], + compare?: (a: T, b: T) => number, +): T[] => { + const result: T[] = []; + let i = 0; + let j = 0; + + while (i < array1.length && j < array2.length) { + if ( + compare ? compare(array1[i]!, array2[j]!) < 0 : array1[i]! < array2[j]! 
+ ) { + result.push(array1[i]!); + i++; + } else { + result.push(array2[j]!); + j++; + } + } + + if (i < array1.length) { + result.push(...array1.slice(i)); + } + + if (j < array2.length) { + result.push(...array2.slice(j)); + } + + return result; +}; + +/** + * Merges many sorted arrays into a single sorted array. + * + * @param arrays - The arrays to merge. + * @param compare - The comparison function to use. + * + * @returns The merged array. + * + * @example + * ```ts + * const result = zipperMany([ + * [1, 3, 5], + * [2, 4, 6], + * [7, 8, 9], + * ]); + * // result = [1, 2, 3, 4, 5, 6, 7, 8, 9] + * ``` + */ +export const zipperMany = ( + arrays: T[][], + compare?: (a: T, b: T) => number, +): T[] => { + if (arrays.length === 0) return []; + if (arrays.length === 1) return arrays[0]!; + + let result: T[] = arrays[0]!; + + for (let i = 1; i < arrays.length; i++) { + result = zipper(result, arrays[i]!, compare); + } + + return result; +}; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 50caea96c..2206e6fce 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -826,13 +826,13 @@ importers: version: 2.4.0 '@hono/node-server': specifier: 1.13.3 - version: 1.13.3 + version: 1.13.3(hono@4.5.0) '@ponder/utils': specifier: workspace:* version: link:../utils abitype: specifier: ^0.10.2 - version: 0.10.3(zod@3.23.8) + version: 0.10.3(typescript@5.3.3)(zod@3.23.8) commander: specifier: ^12.0.0 version: 12.0.0 @@ -904,7 +904,7 @@ importers: version: 1.0.2(@types/node@20.11.24) vite-tsconfig-paths: specifier: 4.3.1 - version: 4.3.1(vite@5.0.7(@types/node@20.11.24)) + version: 4.3.1(typescript@5.3.3)(vite@5.0.7(@types/node@20.11.24)) devDependencies: '@pgsql/types': specifier: 16.0.0 @@ -929,7 +929,7 @@ importers: version: 0.0.6 '@wagmi/cli': specifier: ^1.5.2 - version: 1.5.2 + version: 1.5.2(typescript@5.3.3) execa: specifier: ^8.0.1 version: 8.0.1 @@ -938,7 +938,7 @@ importers: version: 5.0.5 tsup: specifier: ^8.0.1 - version: 8.0.1(postcss@8.4.32) + version: 8.0.1(postcss@8.4.32)(ts-node@10.9.2(@types/node@20.11.24)(typescript@5.3.3))(typescript@5.3.3) vitest: specifier: ^1.0.2 version: 1.0.2(@types/node@20.11.24) @@ -7826,6 +7826,7 @@ packages: shikiji@0.6.10: resolution: {integrity: sha512-WE+A5Y2ntM5hL3iJQujk97qr5Uj7PSIRXpQfrZ6h+JWPXZ8KBEDhFXc4lqNriaRq1WGOVPUT83XMOzmHiH3W8A==} + deprecated: Shikiji is merged back to Shiki v1.0, please migrate over to get the latest updates side-channel@1.0.4: resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} @@ -10555,7 +10556,9 @@ snapshots: react: 18.2.0 react-dom: 18.2.0(react@18.2.0) - '@hono/node-server@1.13.3': {} + '@hono/node-server@1.13.3(hono@4.5.0)': + dependencies: + hono: 4.5.0 '@hono/trpc-server@0.3.2(@trpc/server@10.45.2)(hono@4.5.0)': dependencies: @@ -11417,34 +11420,6 @@ snapshots: loupe: 2.3.7 pretty-format: 29.7.0 - '@wagmi/cli@1.5.2': - dependencies: - abitype: 0.8.7(zod@3.23.8) - abort-controller: 3.0.0 - bundle-require: 3.1.2(esbuild@0.16.17) - cac: 6.7.14 - change-case: 4.1.2 - chokidar: 3.6.0 - dedent: 0.7.0 - detect-package-manager: 2.0.1 - dotenv: 16.4.5 - dotenv-expand: 10.0.0 - esbuild: 0.16.17 - execa: 6.1.0 - find-up: 6.3.0 - fs-extra: 10.1.0 - globby: 13.2.2 - node-fetch: 3.3.2 - ora: 6.3.1 - pathe: 1.1.1 - picocolors: 1.0.0 - prettier: 2.8.8 - viem: 1.21.4(zod@3.23.8) - zod: 3.23.8 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - '@wagmi/cli@1.5.2(typescript@5.3.3)': dependencies: abitype: 0.8.7(typescript@5.3.3)(zod@3.23.8) @@ -11528,29 
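A usage note for the `zipper` helpers introduced above: both accept an optional comparator, so they can merge sorted arrays of non-primitive items, for example events ordered by an encoded checkpoint string. A sketch with a made-up event shape:

```ts
import { zipper, zipperMany } from "@/utils/zipper.js";

// Hypothetical event shape, kept sorted by an encoded checkpoint string.
type RawEvent = { checkpoint: string };

const byCheckpoint = (a: RawEvent, b: RawEvent) =>
  a.checkpoint < b.checkpoint ? -1 : a.checkpoint > b.checkpoint ? 1 : 0;

const merged = zipper(
  [{ checkpoint: "01" }, { checkpoint: "03" }],
  [{ checkpoint: "02" }, { checkpoint: "04" }],
  byCheckpoint,
);
// merged: "01", "02", "03", "04"

// zipperMany folds zipper over any number of sorted inputs.
const all = zipperMany(
  [
    [{ checkpoint: "01" }, { checkpoint: "04" }],
    [{ checkpoint: "02" }, { checkpoint: "05" }],
    [{ checkpoint: "03" }],
  ],
  byCheckpoint,
);
// all: "01", "02", "03", "04", "05"
```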
+11503,17 @@ snapshots: typescript: 5.3.3 zod: 3.23.8 - abitype@0.10.3(zod@3.23.8): - optionalDependencies: - zod: 3.23.8 - abitype@0.8.7(typescript@5.3.3)(zod@3.23.8): dependencies: typescript: 5.3.3 optionalDependencies: zod: 3.23.8 - abitype@0.8.7(zod@3.23.8): - optionalDependencies: - zod: 3.23.8 - abitype@0.9.8(typescript@5.3.3)(zod@3.23.8): optionalDependencies: typescript: 5.3.3 zod: 3.23.8 - abitype@0.9.8(zod@3.23.8): - optionalDependencies: - zod: 3.23.8 - abitype@1.0.5(typescript@5.0.4)(zod@3.23.8): optionalDependencies: typescript: 5.0.4 @@ -12623,10 +12586,6 @@ snapshots: optionalDependencies: supports-color: 8.1.1 - debug@4.3.7: - dependencies: - ms: 2.1.3 - debug@4.3.7(supports-color@8.1.1): dependencies: ms: 2.1.3 @@ -13591,8 +13550,6 @@ snapshots: focus-visible@5.2.0: {} - follow-redirects@1.15.6: {} - follow-redirects@1.15.6(debug@4.3.4): optionalDependencies: debug: 4.3.4(supports-color@8.1.1) @@ -14107,7 +14064,7 @@ snapshots: http-proxy@1.18.1: dependencies: eventemitter3: 4.0.7 - follow-redirects: 1.15.6 + follow-redirects: 1.15.6(debug@4.3.4) requires-port: 1.0.0 transitivePeerDependencies: - debug @@ -16274,13 +16231,6 @@ snapshots: camelcase-css: 2.0.1 postcss: 8.4.32 - postcss-load-config@4.0.2(postcss@8.4.32): - dependencies: - lilconfig: 3.1.2 - yaml: 2.3.4 - optionalDependencies: - postcss: 8.4.32 - postcss-load-config@4.0.2(postcss@8.4.32)(ts-node@10.9.2(@types/node@20.11.24)(typescript@5.3.3)): dependencies: lilconfig: 3.1.2 @@ -17453,7 +17403,9 @@ snapshots: yn: 3.1.1 optional: true - tsconfck@3.0.1: {} + tsconfck@3.0.1(typescript@5.3.3): + optionalDependencies: + typescript: 5.3.3 tsconfig-paths@3.15.0: dependencies: @@ -17464,28 +17416,6 @@ snapshots: tslib@2.6.2: {} - tsup@8.0.1(postcss@8.4.32): - dependencies: - bundle-require: 4.0.2(esbuild@0.19.11) - cac: 6.7.14 - chokidar: 3.6.0 - debug: 4.3.7 - esbuild: 0.19.11 - execa: 5.1.1 - globby: 11.1.0 - joycon: 3.1.1 - postcss-load-config: 4.0.2(postcss@8.4.32) - resolve-from: 5.0.0 - rollup: 4.9.2 - source-map: 0.8.0-beta.0 - sucrase: 3.35.0 - tree-kill: 1.2.2 - optionalDependencies: - postcss: 8.4.32 - transitivePeerDependencies: - - supports-color - - ts-node - tsup@8.0.1(postcss@8.4.32)(ts-node@10.9.2(@types/node@20.11.24)(typescript@5.3.3))(typescript@5.3.3): dependencies: bundle-require: 4.0.2(esbuild@0.19.11) @@ -17861,21 +17791,6 @@ snapshots: - utf-8-validate - zod - viem@1.21.4(zod@3.23.8): - dependencies: - '@adraffy/ens-normalize': 1.10.0 - '@noble/curves': 1.2.0 - '@noble/hashes': 1.3.2 - '@scure/bip32': 1.3.2 - '@scure/bip39': 1.2.1 - abitype: 0.9.8(zod@3.23.8) - isows: 1.0.3(ws@8.13.0) - ws: 8.13.0 - transitivePeerDependencies: - - bufferutil - - utf-8-validate - - zod - viem@2.21.3(typescript@5.0.4)(zod@3.23.8): dependencies: '@adraffy/ens-normalize': 1.10.0 @@ -17915,7 +17830,7 @@ snapshots: vite-node@1.0.2(@types/node@20.11.24): dependencies: cac: 6.7.14 - debug: 4.3.7 + debug: 4.3.7(supports-color@8.1.1) pathe: 1.1.1 picocolors: 1.0.0 vite: 5.0.7(@types/node@20.11.24) @@ -17929,11 +17844,11 @@ snapshots: - supports-color - terser - vite-tsconfig-paths@4.3.1(vite@5.0.7(@types/node@20.11.24)): + vite-tsconfig-paths@4.3.1(typescript@5.3.3)(vite@5.0.7(@types/node@20.11.24)): dependencies: - debug: 4.3.7 + debug: 4.3.7(supports-color@8.1.1) globrex: 0.1.2 - tsconfck: 3.0.1 + tsconfck: 3.0.1(typescript@5.3.3) optionalDependencies: vite: 5.0.7(@types/node@20.11.24) transitivePeerDependencies: @@ -17959,7 +17874,7 @@ snapshots: acorn-walk: 8.3.1 cac: 6.7.14 chai: 4.3.10 - debug: 4.3.7 + debug: 
4.3.7(supports-color@8.1.1) execa: 8.0.1 local-pkg: 0.5.0 magic-string: 0.30.5