From 7bfe63c1f941fd51a69b42dd36e94036afd57542 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 17:40:12 +0800 Subject: [PATCH 01/34] feat: define BlockFetcher interface and extract RpcBlockFetcher --- src/providers/evm/fetchers/index.ts | 2 + src/providers/evm/fetchers/rpc.ts | 230 +++++++++++++++++++ src/providers/evm/fetchers/types.ts | 31 +++ test/unit/providers/evm/fetchers/rpc.test.ts | 8 + 4 files changed, 271 insertions(+) create mode 100644 src/providers/evm/fetchers/index.ts create mode 100644 src/providers/evm/fetchers/rpc.ts create mode 100644 src/providers/evm/fetchers/types.ts create mode 100644 test/unit/providers/evm/fetchers/rpc.test.ts diff --git a/src/providers/evm/fetchers/index.ts b/src/providers/evm/fetchers/index.ts new file mode 100644 index 0000000..35e10e3 --- /dev/null +++ b/src/providers/evm/fetchers/index.ts @@ -0,0 +1,2 @@ +export type { BlockFetcher, FetchedBlock } from './types'; +export { RpcBlockFetcher } from './rpc'; diff --git a/src/providers/evm/fetchers/rpc.ts b/src/providers/evm/fetchers/rpc.ts new file mode 100644 index 0000000..d56f5d0 --- /dev/null +++ b/src/providers/evm/fetchers/rpc.ts @@ -0,0 +1,230 @@ +import { + createPublicClient, + formatLog, + http, + Log, + PublicClient, + RpcLog +} from 'viem'; +import { BlockFetcher, FetchedBlock } from './types'; +import { getRangeHint } from '../helpers'; +import { CustomJsonRpcError } from '../types'; +import { CheckpointRecord } from '../../../stores/checkpoints'; +import { ContractSourceConfig } from '../../../types'; +import { sleep } from '../../../utils/helpers'; + +type GetLogsBlockHashFilter = { + blockHash: string; +}; + +type GetLogsBlockRangeFilter = { + fromBlock: number; + toBlock: number; +}; + +/** + * Timeout for client requests in milliseconds. + * This timeout is also used when fetching latest blocks in getLogs. 
+ */ +const CLIENT_TIMEOUT = 5 * 1000; + +const MAX_BLOCKS_PER_REQUEST = 10000; + +export class RpcBlockFetcher implements BlockFetcher { + private readonly client: PublicClient; + private readonly networkNodeUrl: string; + + constructor(networkNodeUrl: string) { + this.networkNodeUrl = networkNodeUrl; + this.client = createPublicClient({ + transport: http(networkNodeUrl, { + timeout: CLIENT_TIMEOUT + }) + }); + } + + async getChainId(): Promise { + return this.client.getChainId(); + } + + async getLatestBlockNumber(): Promise { + const blockNumber = await this.client.getBlockNumber(); + + return Number(blockNumber); + } + + async getBlock(blockNumber: number): Promise { + const block = await this.client.getBlock({ + blockNumber: BigInt(blockNumber) + }); + + return { + number: Number(block.number), + hash: block.hash, + parentHash: block.parentHash, + timestamp: Number(block.timestamp) + }; + } + + async getBlockHash(blockNumber: number): Promise { + const block = await this.client.getBlock({ + blockNumber: BigInt(blockNumber) + }); + + return block.hash; + } + + /** + * This method is a simpler implementation of getLogs method. + * This allows using two filters that are not supported in ethers v5: + * - `blockHash` to get logs for a specific block - if node doesn't know about that block it will fail. + * - `address` as a single address or an array of addresses. 
+ * @param filter Logs filter + */ + private async _getLogs( + filter: (GetLogsBlockHashFilter | GetLogsBlockRangeFilter) & { + address?: string | string[]; + topics?: (string | string[])[]; + } + ): Promise { + const params: { + fromBlock?: string; + toBlock?: string; + blockHash?: string; + address?: string | string[]; + topics?: (string | string[])[]; + } = {}; + + let signal: AbortSignal | undefined; + + if ('blockHash' in filter) { + signal = AbortSignal.timeout(CLIENT_TIMEOUT); + params.blockHash = filter.blockHash; + } + + if ('fromBlock' in filter) { + params.fromBlock = `0x${filter.fromBlock.toString(16)}`; + } + + if ('toBlock' in filter) { + params.toBlock = `0x${filter.toBlock.toString(16)}`; + } + + if ('address' in filter) { + params.address = filter.address; + } + + if ('topics' in filter) { + params.topics = filter.topics; + } + + const res = await fetch(this.networkNodeUrl, { + method: 'POST', + signal, + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + jsonrpc: '2.0', + id: 1, + method: 'eth_getLogs', + params: [params] + }) + }); + + if (!res.ok) { + throw new Error(`Request failed: ${res.statusText}`); + } + + const json = await res.json(); + + if (json.error) { + throw new CustomJsonRpcError( + json.error.message, + json.error.code, + json.error.data + ); + } + + return json.result.map((log: RpcLog) => formatLog(log)); + } + + async getLogsByBlockHash(blockHash: string): Promise { + return this._getLogs({ blockHash }); + } + + async getLogs( + fromBlock: number, + toBlock: number, + address: string | string[], + topics: (string | string[])[] = [] + ): Promise { + let result = [] as Log[]; + + let currentFrom = fromBlock; + let currentTo = Math.min(toBlock, currentFrom + MAX_BLOCKS_PER_REQUEST); + while (true) { + try { + const logs = await this._getLogs({ + fromBlock: currentFrom, + toBlock: currentTo, + address, + topics + }); + + result = result.concat(logs); + + if (currentTo === toBlock) break; + currentFrom = 
currentTo + 1; + currentTo = Math.min(toBlock, currentFrom + MAX_BLOCKS_PER_REQUEST); + } catch (err: unknown) { + const rangeHint = getRangeHint(err, { + from: currentFrom, + to: currentTo + }); + + if (rangeHint) { + currentFrom = rangeHint.from; + currentTo = rangeHint.to; + continue; + } + + await sleep(5000); + } + } + + return result; + } + + async getCheckpointsRange( + fromBlock: number, + toBlock: number, + sources: ContractSourceConfig[], + getEventHash: (name: string) => string + ): Promise<{ checkpoints: CheckpointRecord[]; logs: Log[] }> { + const chunks: ContractSourceConfig[][] = []; + for (let i = 0; i < sources.length; i += 20) { + chunks.push(sources.slice(i, i + 20)); + } + + let logs: Log[] = []; + for (const chunk of chunks) { + const address = chunk.map(source => source.contract); + const topics = chunk.flatMap(source => + source.events.map(event => getEventHash(event.name)) + ); + + const chunkLogs = await this.getLogs(fromBlock, toBlock, address, [ + topics + ]); + logs = logs.concat(chunkLogs); + } + + const checkpoints = logs.map(log => ({ + blockNumber: Number(log.blockNumber), + contractAddress: log.address + })); + + return { checkpoints, logs }; + } +} diff --git a/src/providers/evm/fetchers/types.ts b/src/providers/evm/fetchers/types.ts new file mode 100644 index 0000000..beca595 --- /dev/null +++ b/src/providers/evm/fetchers/types.ts @@ -0,0 +1,31 @@ +import { Log } from 'viem'; +import { CheckpointRecord } from '../../../stores/checkpoints'; +import { ContractSourceConfig } from '../../../types'; + +export type FetchedBlock = { + number: number; + hash: string; + parentHash: string; + timestamp: number; +}; + +export type BlockFetcher = { + getChainId(): Promise; + getLatestBlockNumber(): Promise; + getBlock(blockNumber: number): Promise; + getBlockHash(blockNumber: number): Promise; + getLogs( + fromBlock: number, + toBlock: number, + address: string | string[], + topics?: (string | string[])[] + ): Promise; + 
getLogsByBlockHash(blockHash: string): Promise; + getCheckpointsRange( + fromBlock: number, + toBlock: number, + sources: ContractSourceConfig[], + getEventHash: (name: string) => string + ): Promise<{ checkpoints: CheckpointRecord[]; logs: Log[] }>; + getBlockTimestamps?(): Map; +}; diff --git a/test/unit/providers/evm/fetchers/rpc.test.ts b/test/unit/providers/evm/fetchers/rpc.test.ts new file mode 100644 index 0000000..0fba556 --- /dev/null +++ b/test/unit/providers/evm/fetchers/rpc.test.ts @@ -0,0 +1,8 @@ +import { RpcBlockFetcher } from '../../../../../src/providers/evm/fetchers/rpc'; + +describe('RpcBlockFetcher', () => { + it('should be instantiated with a network URL', () => { + const fetcher = new RpcBlockFetcher('https://rpc.example.com'); + expect(fetcher).toBeDefined(); + }); +}); From f605ea2fbca90d081a3969a9456907e62c6c0c6c Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 17:43:21 +0800 Subject: [PATCH 02/34] feat: add hypersync_api_token to CheckpointConfig schema --- src/schemas.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/schemas.ts b/src/schemas.ts index 49d3922..17a45fe 100644 --- a/src/schemas.ts +++ b/src/schemas.ts @@ -26,7 +26,8 @@ export const checkpointConfigSchema = z.object({ global_events: z.array(contractEventConfigSchema).optional(), sources: z.array(contractSourceConfigSchema).optional(), templates: z.record(contractTemplateSchema).optional(), - abis: z.record(z.any()).optional() + abis: z.record(z.any()).optional(), + hypersync_api_token: z.string().optional() }); export const overridesConfigSchema = z.object({ From d03233021f044d447671664bd4086ccd1b40d415 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 17:45:33 +0800 Subject: [PATCH 03/34] refactor: use BlockFetcher abstraction in EvmProvider --- src/providers/evm/indexer.ts | 6 +- src/providers/evm/provider.ts | 229 
+++++++--------------------------- 2 files changed, 48 insertions(+), 187 deletions(-) diff --git a/src/providers/evm/indexer.ts b/src/providers/evm/indexer.ts index f848839..e892ae1 100644 --- a/src/providers/evm/indexer.ts +++ b/src/providers/evm/indexer.ts @@ -1,5 +1,6 @@ import { Logger } from '../../utils/logger'; import { BaseIndexer, Instance } from '../base'; +import { RpcBlockFetcher } from './fetchers/rpc'; import { EvmProvider } from './provider'; import { Writer } from './types'; @@ -20,11 +21,14 @@ export class EvmIndexer extends BaseIndexer { log: Logger; abis?: Record; }) { + const fetcher = new RpcBlockFetcher(instance.config.network_node_url); + this.provider = new EvmProvider({ instance, log, abis, - writers: this.writers + writers: this.writers, + fetcher }); } diff --git a/src/providers/evm/provider.ts b/src/providers/evm/provider.ts index f8207ab..d0b9da9 100644 --- a/src/providers/evm/provider.ts +++ b/src/providers/evm/provider.ts @@ -1,63 +1,38 @@ import { - createPublicClient, - formatLog, getAddress, - http, keccak256, Log, parseEventLogs, ParseEventLogsReturnType, - PublicClient, - RpcLog, stringToBytes } from 'viem'; -import { getRangeHint } from './helpers'; +import { BlockFetcher } from './fetchers/types'; import { Block, CustomJsonRpcError, EventsData, Writer } from './types'; import { CheckpointRecord } from '../../stores/checkpoints'; import { ContractSourceConfig } from '../../types'; -import { sleep } from '../../utils/helpers'; import { BaseProvider, BlockNotFoundError, ReorgDetectedError } from '../base'; -type GetLogsBlockHashFilter = { - blockHash: string; -}; - -type GetLogsBlockRangeFilter = { - fromBlock: number; - toBlock: number; -}; - -/** - * Timeout for client requests in milliseconds. - * This timeout is also used when fetching latest blocks in getLogs. 
- */ -const CLIENT_TIMEOUT = 5 * 1000; - -const MAX_BLOCKS_PER_REQUEST = 10000; - export class EvmProvider extends BaseProvider { - private readonly client: PublicClient; + private readonly fetcher: BlockFetcher; private readonly writers: Record; private sourceHashes = new Map(); private logsCache = new Map(); + blockTimestampCache = new Map(); constructor({ instance, log, abis, - writers + writers, + fetcher }: ConstructorParameters[0] & { writers: Record; + fetcher: BlockFetcher; }) { super({ instance, log, abis }); - this.client = createPublicClient({ - transport: http(instance.config.network_node_url, { - timeout: CLIENT_TIMEOUT - }) - }); - + this.fetcher = fetcher; this.writers = writers; } @@ -66,23 +41,17 @@ export class EvmProvider extends BaseProvider { } async getNetworkIdentifier(): Promise { - const chainId = await this.client.getChainId(); + const chainId = await this.fetcher.getChainId(); return `evm_${chainId}`; } async getLatestBlockNumber(): Promise { - const blockNumber = await this.client.getBlockNumber(); - - return Number(blockNumber); + return this.fetcher.getLatestBlockNumber(); } async getBlockHash(blockNumber: number) { - const block = await this.client.getBlock({ - blockNumber: BigInt(blockNumber) - }); - - return block.hash; + return this.fetcher.getBlockHash(blockNumber); } async processBlock(blockNumber: number, parentHash: string | null) { @@ -95,9 +64,15 @@ export class EvmProvider extends BaseProvider { try { if (!hasPreloadedBlockEvents) { - block = await this.client.getBlock({ - blockNumber: BigInt(blockNumber) - }); + const fetched = await this.fetcher.getBlock(blockNumber); + const cachedTimestamp = this.blockTimestampCache.get(blockNumber); + const timestamp = cachedTimestamp ?? fetched.timestamp; + block = { + number: BigInt(blockNumber), + hash: fetched.hash, + parentHash: fetched.parentHash, + timestamp: BigInt(timestamp) + } as Block; } } catch (err) { this.log.error({ blockNumber, err }, 'getting block failed... 
retrying'); @@ -330,9 +305,7 @@ export class EvmProvider extends BaseProvider { throw new Error('Block hash is required to fetch logs from network'); } - events = await this._getLogs({ - blockHash - }); + events = await this.fetcher.getLogsByBlockHash(blockHash); } return { @@ -349,134 +322,6 @@ export class EvmProvider extends BaseProvider { }; } - /** - * This method is simpler implementation of getLogs method. - * This allows using two filters that are not supported in ethers v5: - * - `blockHash` to get logs for a specific block - if node doesn't know about that block it will fail. - * - `address` as a single address or an array of addresses. - * @param filter Logs filter - */ - private async _getLogs( - filter: (GetLogsBlockHashFilter | GetLogsBlockRangeFilter) & { - address?: string | string[]; - topics?: (string | string[])[]; - } - ): Promise { - const params: { - fromBlock?: string; - toBlock?: string; - blockHash?: string; - address?: string | string[]; - topics?: (string | string[])[]; - } = {}; - - let signal: AbortSignal | undefined; - - if ('blockHash' in filter) { - signal = AbortSignal.timeout(CLIENT_TIMEOUT); - params.blockHash = filter.blockHash; - } - - if ('fromBlock' in filter) { - params.fromBlock = `0x${filter.fromBlock.toString(16)}`; - } - - if ('toBlock' in filter) { - params.toBlock = `0x${filter.toBlock.toString(16)}`; - } - - if ('address' in filter) { - params.address = filter.address; - } - - if ('topics' in filter) { - params.topics = filter.topics; - } - - const res = await fetch(this.instance.config.network_node_url, { - method: 'POST', - signal, - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify({ - jsonrpc: '2.0', - id: 1, - method: 'eth_getLogs', - params: [params] - }) - }); - - if (!res.ok) { - throw new Error(`Request failed: ${res.statusText}`); - } - - const json = await res.json(); - - if (json.error) { - throw new CustomJsonRpcError( - json.error.message, - json.error.code, - json.error.data - 
); - } - - return json.result.map((log: RpcLog) => formatLog(log)); - } - - async getLogs( - fromBlock: number, - toBlock: number, - address: string | string[], - topics: (string | string[])[] = [] - ): Promise { - let result = [] as Log[]; - - let currentFrom = fromBlock; - let currentTo = Math.min(toBlock, currentFrom + MAX_BLOCKS_PER_REQUEST); - while (true) { - try { - const logs = await this._getLogs({ - fromBlock: currentFrom, - toBlock: currentTo, - address, - topics - }); - - result = result.concat(logs); - - if (currentTo === toBlock) break; - currentFrom = currentTo + 1; - currentTo = Math.min(toBlock, currentFrom + MAX_BLOCKS_PER_REQUEST); - } catch (err: unknown) { - const rangeHint = getRangeHint(err, { - from: currentFrom, - to: currentTo - }); - - if (rangeHint) { - this.log.warn( - { err, rangeHint, fromBlock: currentFrom, toBlock: currentTo }, - 'getLogs failed. Received new range hint' - ); - - currentFrom = rangeHint.from; - currentTo = rangeHint.to; - continue; - } - - this.log.error( - { fromBlock: currentFrom, toBlock: currentTo, address, err }, - 'getLogs failed' - ); - - await sleep(5000); - } - } - - return result; - } - async getLogsForSources({ fromBlock, toBlock, @@ -498,9 +343,12 @@ export class EvmProvider extends BaseProvider { source.events.map(event => this.getEventHash(event.name)) ); - const chunkEvents = await this.getLogs(fromBlock, toBlock, address, [ - topics - ]); + const chunkEvents = await this.fetcher.getLogs( + fromBlock, + toBlock, + address, + [topics] + ); events = events.concat(chunkEvents); } @@ -511,13 +359,14 @@ export class EvmProvider extends BaseProvider { fromBlock: number, toBlock: number ): Promise { - const events = await this.getLogsForSources({ + const { checkpoints, logs } = await this.fetcher.getCheckpointsRange( fromBlock, toBlock, - sources: this.instance.getCurrentSources(toBlock) - }); + this.instance.getCurrentSources(toBlock), + name => this.getEventHash(name) + ); - for (const log of events) { + 
for (const log of logs) { if (log.blockNumber === null) continue; if (!this.logsCache.has(log.blockNumber)) { @@ -527,10 +376,14 @@ export class EvmProvider extends BaseProvider { this.logsCache.get(log.blockNumber)?.push(log); } - return events.map(log => ({ - blockNumber: Number(log.blockNumber), - contractAddress: log.address - })); + const blockTimestamps = this.fetcher.getBlockTimestamps?.(); + if (blockTimestamps) { + for (const [blockNumber, timestamp] of blockTimestamps) { + this.blockTimestampCache.set(blockNumber, timestamp); + } + } + + return checkpoints; } getEventHash(eventName: string) { @@ -541,6 +394,10 @@ export class EvmProvider extends BaseProvider { return this.sourceHashes.get(eventName) as string; } + cacheBlockTimestamp(blockNumber: number, timestamp: number): void { + this.blockTimestampCache.set(blockNumber, timestamp); + } + compareAddress(a: string, b: string) { return a.toLowerCase() === b.toLowerCase(); } From f57d9b3feb95b07317a1771684edafaf37047942 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 17:49:31 +0800 Subject: [PATCH 04/34] feat: implement HypersyncBlockFetcher --- package.json | 1 + src/providers/evm/fetchers/hypersync.ts | 159 ++++++++++++++++++ src/providers/evm/fetchers/index.ts | 1 + .../providers/evm/fetchers/hypersync.test.ts | 20 +++ yarn.lock | 36 ++++ 5 files changed, 217 insertions(+) create mode 100644 src/providers/evm/fetchers/hypersync.ts create mode 100644 test/unit/providers/evm/fetchers/hypersync.test.ts diff --git a/package.json b/package.json index 7f19b77..0f325cc 100644 --- a/package.json +++ b/package.json @@ -19,6 +19,7 @@ }, "prettier": "@snapshot-labs/prettier-config", "dependencies": { + "@envio-dev/hypersync-client": "^1.3.0", "@graphql-tools/schema": "^8.5.1", "@starknet-io/types-js": "^0.9.2", "connection-string": "^4.3.5", diff --git a/src/providers/evm/fetchers/hypersync.ts b/src/providers/evm/fetchers/hypersync.ts new file mode 100644 index 
0000000..14687f9 --- /dev/null +++ b/src/providers/evm/fetchers/hypersync.ts @@ -0,0 +1,159 @@ +import { + HypersyncClient, + type Query, + type Log as HypersyncLog +} from '@envio-dev/hypersync-client'; +import { type Log } from 'viem'; +import { CheckpointRecord } from '../../../stores/checkpoints'; +import { ContractSourceConfig } from '../../../types'; +import { BlockFetcher, FetchedBlock } from './types'; +import { RpcBlockFetcher } from './rpc'; + +export class HypersyncBlockFetcher implements BlockFetcher { + private readonly hypersync: HypersyncClient; + private readonly rpcFetcher: RpcBlockFetcher; + private readonly blockTimestamps = new Map(); + + constructor({ + chainId, + apiToken, + rpcUrl + }: { + chainId: number; + apiToken: string; + rpcUrl: string; + }) { + this.hypersync = new HypersyncClient({ + url: `https://${chainId}.hypersync.xyz`, + apiToken + }); + this.rpcFetcher = new RpcBlockFetcher(rpcUrl); + } + + getBlockTimestamps(): Map { + return this.blockTimestamps; + } + + async getChainId(): Promise { + return this.rpcFetcher.getChainId(); + } + + async getLatestBlockNumber(): Promise { + return this.hypersync.getHeight(); + } + + async getBlock(blockNumber: number): Promise { + return this.rpcFetcher.getBlock(blockNumber); + } + + async getBlockHash(blockNumber: number): Promise { + return this.rpcFetcher.getBlockHash(blockNumber); + } + + async getLogsByBlockHash(blockHash: string): Promise { + return this.rpcFetcher.getLogsByBlockHash(blockHash); + } + + async getLogs( + fromBlock: number, + toBlock: number, + address: string | string[], + topics: (string | string[])[] = [] + ): Promise { + const addresses = Array.isArray(address) ? address : [address]; + const topic0 = + topics.length > 0 + ? Array.isArray(topics[0]) + ? topics[0] + : [topics[0]] + : undefined; + + const query: Query = { + fromBlock, + toBlock: toBlock + 1, // HyperSync toBlock is exclusive + logs: [ + { + address: addresses, + topics: topic0 ? 
[topic0] : undefined + } + ], + fieldSelection: { + log: [ + 'BlockNumber', + 'TransactionHash', + 'TransactionIndex', + 'BlockHash', + 'Address', + 'Data', + 'LogIndex', + 'Topic0', + 'Topic1', + 'Topic2', + 'Topic3', + 'Removed' + ], + block: ['Number', 'Timestamp'] + } + }; + + const response = await this.hypersync.collect(query, {}); + + for (const block of response.data.blocks) { + if (block.number != null && block.timestamp != null) { + this.blockTimestamps.set(block.number, block.timestamp); + } + } + + return this.convertHypersyncLogs(response.data.logs); + } + + async getCheckpointsRange( + fromBlock: number, + toBlock: number, + sources: ContractSourceConfig[], + getEventHash: (name: string) => string + ): Promise<{ checkpoints: CheckpointRecord[]; logs: Log[] }> { + const chunks: ContractSourceConfig[][] = []; + for (let i = 0; i < sources.length; i += 20) { + chunks.push(sources.slice(i, i + 20)); + } + + let allLogs: Log[] = []; + for (const chunk of chunks) { + const addresses = chunk.map(source => source.contract); + const topics = chunk.flatMap(source => + source.events.map(event => getEventHash(event.name)) + ); + const logs = await this.getLogs(fromBlock, toBlock, addresses, [topics]); + allLogs = allLogs.concat(logs); + } + + const checkpoints = allLogs.map(log => ({ + blockNumber: Number(log.blockNumber), + contractAddress: log.address + })); + + return { checkpoints, logs: allLogs }; + } + + private convertHypersyncLogs(hypersyncLogs: HypersyncLog[]): Log[] { + return hypersyncLogs.map(log => { + const topics: `0x${string}`[] = []; + for (const topic of log.topics) { + if (topic) topics.push(topic as `0x${string}`); + } + + return { + address: log.address as `0x${string}`, + blockHash: log.blockHash as `0x${string}`, + blockNumber: log.blockNumber != null ? BigInt(log.blockNumber) : null, + data: (log.data ?? '0x') as `0x${string}`, + logIndex: log.logIndex ?? 
0, + transactionHash: log.transactionHash as `0x${string}`, + transactionIndex: log.transactionIndex ?? 0, + removed: log.removed ?? false, + topics + } as Log; + }); + } +} diff --git a/src/providers/evm/fetchers/index.ts b/src/providers/evm/fetchers/index.ts index 35e10e3..253c841 100644 --- a/src/providers/evm/fetchers/index.ts +++ b/src/providers/evm/fetchers/index.ts @@ -1,2 +1,3 @@ export type { BlockFetcher, FetchedBlock } from './types'; export { RpcBlockFetcher } from './rpc'; +export { HypersyncBlockFetcher } from './hypersync'; diff --git a/test/unit/providers/evm/fetchers/hypersync.test.ts b/test/unit/providers/evm/fetchers/hypersync.test.ts new file mode 100644 index 0000000..2dfde75 --- /dev/null +++ b/test/unit/providers/evm/fetchers/hypersync.test.ts @@ -0,0 +1,20 @@ +import { HypersyncBlockFetcher } from '../../../../../src/providers/evm/fetchers/hypersync'; + +jest.mock('@envio-dev/hypersync-client', () => ({ + HypersyncClient: jest.fn().mockImplementation(() => ({ + getHeight: jest.fn(), + getChainId: jest.fn(), + collect: jest.fn() + })) +})); + +describe('HypersyncBlockFetcher', () => { + it('should be instantiated with chainId, apiToken, and rpcUrl', () => { + const fetcher = new HypersyncBlockFetcher({ + chainId: 1, + apiToken: 'test-token', + rpcUrl: 'https://rpc.example.com' + }); + expect(fetcher).toBeDefined(); + }); +}); diff --git a/yarn.lock b/yarn.lock index d887ec2..7cdf01d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -499,6 +499,42 @@ dependencies: "@jridgewell/trace-mapping" "0.3.9" +"@envio-dev/hypersync-client-darwin-arm64@1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@envio-dev/hypersync-client-darwin-arm64/-/hypersync-client-darwin-arm64-1.3.0.tgz#570864a069d9d0aab5e0f079c9312f4760430287" + integrity sha512-JZwiVRbMSuJnKsVUpfjTHc3YgAMvGlyuqWQxVc7Eok4Xp/sZLUCXRQUykbCh6fOUWRmoa2JG/ykP/NotoTRCBg== + +"@envio-dev/hypersync-client-darwin-x64@1.3.0": + version "1.3.0" + resolved 
"https://registry.yarnpkg.com/@envio-dev/hypersync-client-darwin-x64/-/hypersync-client-darwin-x64-1.3.0.tgz#b2ff4c0f051462526380752a790ad594355cec24" + integrity sha512-2eSzQqqqFBMK2enVucYGcny5Ep4DEKYxf3Xme7z9qp2d3c6fMcbVvM4Gt8KOzb7ySjwJ2gU+qY2h545T2NiJXQ== + +"@envio-dev/hypersync-client-linux-arm64-gnu@1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@envio-dev/hypersync-client-linux-arm64-gnu/-/hypersync-client-linux-arm64-gnu-1.3.0.tgz#6faae143bceb44693490ac51234164b67a97f75b" + integrity sha512-gsjMp3WKekwnA89HvJXvcTM3BE5wVFG/qTF4rmk3rGiXhZ+MGaZQKrYRAhnzQZblueFtF/xnnBYpO35Z3ZFThg== + +"@envio-dev/hypersync-client-linux-x64-gnu@1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@envio-dev/hypersync-client-linux-x64-gnu/-/hypersync-client-linux-x64-gnu-1.3.0.tgz#ed44ccaa118a5cb5a1f59a62812fb63e2c3324d1" + integrity sha512-Lkvi4lRVwCyFOXf9LYH2X91zmW2l1vbfojKhTwKgqFWv6PMN5atlYjt+/NcUCAAhk5EUavWGjoikwnvLp870cg== + +"@envio-dev/hypersync-client-linux-x64-musl@1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@envio-dev/hypersync-client-linux-x64-musl/-/hypersync-client-linux-x64-musl-1.3.0.tgz#b7c59244376956d64c7cdb2978315ae3d0c1b84e" + integrity sha512-UIjB/gUX2sl23EMXLBxqtkgMnOjNSiaHK+CSU5vXMXkzL3fOGbz24bvyaPsSv82cxCFEE0yTwlSKkCX6/L8o6Q== + +"@envio-dev/hypersync-client@^1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@envio-dev/hypersync-client/-/hypersync-client-1.3.0.tgz#dd0f5dbf38144dcceb78b3d89760156ef7caf509" + integrity sha512-wUdfZzbsFPbGq6n/1mmUMsWuiAil+m+fL/GBX5LGUyMJV86TXy2SBtAqYYNyDxWLO6gvGr6PYKrP8pLVAUZDZg== + optionalDependencies: + "@envio-dev/hypersync-client-darwin-arm64" "1.3.0" + "@envio-dev/hypersync-client-darwin-x64" "1.3.0" + "@envio-dev/hypersync-client-linux-arm64-gnu" "1.3.0" + "@envio-dev/hypersync-client-linux-x64-gnu" "1.3.0" + "@envio-dev/hypersync-client-linux-x64-musl" "1.3.0" + "@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.7.0": 
version "4.7.0" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz#607084630c6c033992a082de6e6fbc1a8b52175a" From c8fce5eff48288519ad711b7376db4b1d271056e Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 17:53:42 +0800 Subject: [PATCH 05/34] feat: wire fetcher factory with HyperSync support --- src/container.ts | 12 ++++++------ src/providers/base.ts | 4 ++-- src/providers/evm/fetchers/index.ts | 28 ++++++++++++++++++++++++++++ src/providers/evm/index.ts | 1 + src/providers/evm/indexer.ts | 14 +++----------- src/providers/starknet/indexer.ts | 2 +- 6 files changed, 41 insertions(+), 20 deletions(-) diff --git a/src/container.ts b/src/container.ts index a663c30..58e7954 100644 --- a/src/container.ts +++ b/src/container.ts @@ -76,12 +76,6 @@ export class Container implements Instance { this.indexer = indexer; this.schema = schema; this.opts = opts; - - this.indexer.init({ - instance: this, - log: this.log, - abis: config.abis - }); } public get sourceContracts() { @@ -209,6 +203,12 @@ export class Container implements Instance { * */ public async start() { + await this.indexer.init({ + instance: this, + log: this.log, + abis: this.config.abis + }); + await this.validateStore(); const templateSources = await this.store.getTemplateSources( diff --git a/src/providers/base.ts b/src/providers/base.ts index 0f47128..6fa95b4 100644 --- a/src/providers/base.ts +++ b/src/providers/base.ts @@ -103,7 +103,7 @@ export class BaseProvider { export class BaseIndexer { protected provider?: BaseProvider; - init({ + async init({ /* eslint-disable @typescript-eslint/no-unused-vars */ instance, log, @@ -113,7 +113,7 @@ export class BaseIndexer { instance: Instance; log: Logger; abis?: Record; - }) { + }): Promise { throw new Error('init method was not defined'); } diff --git a/src/providers/evm/fetchers/index.ts b/src/providers/evm/fetchers/index.ts index 253c841..3dbcf22 100644 --- 
a/src/providers/evm/fetchers/index.ts +++ b/src/providers/evm/fetchers/index.ts @@ -1,3 +1,31 @@ export type { BlockFetcher, FetchedBlock } from './types'; export { RpcBlockFetcher } from './rpc'; export { HypersyncBlockFetcher } from './hypersync'; + +import { CheckpointConfig } from '../../../types'; +import { Logger } from '../../../utils/logger'; +import { BlockFetcher } from './types'; +import { RpcBlockFetcher } from './rpc'; +import { HypersyncBlockFetcher } from './hypersync'; + +export async function createBlockFetcher( + config: CheckpointConfig, + log: Logger +): Promise { + if (config.hypersync_api_token) { + const rpcFetcher = new RpcBlockFetcher(config.network_node_url); + const chainId = await rpcFetcher.getChainId(); + + log.info({ chainId }, 'using HyperSync block fetcher'); + + return new HypersyncBlockFetcher({ + chainId, + apiToken: config.hypersync_api_token, + rpcUrl: config.network_node_url + }); + } + + log.info('using RPC block fetcher'); + + return new RpcBlockFetcher(config.network_node_url); +} diff --git a/src/providers/evm/index.ts b/src/providers/evm/index.ts index e5c759a..a1f98c0 100644 --- a/src/providers/evm/index.ts +++ b/src/providers/evm/index.ts @@ -1,3 +1,4 @@ export { EvmProvider } from './provider'; export { EvmIndexer } from './indexer'; export * from './types'; +export type { BlockFetcher, FetchedBlock } from './fetchers/types'; diff --git a/src/providers/evm/indexer.ts b/src/providers/evm/indexer.ts index e892ae1..5d43769 100644 --- a/src/providers/evm/indexer.ts +++ b/src/providers/evm/indexer.ts @@ -1,6 +1,6 @@ import { Logger } from '../../utils/logger'; import { BaseIndexer, Instance } from '../base'; -import { RpcBlockFetcher } from './fetchers/rpc'; +import { createBlockFetcher } from './fetchers'; import { EvmProvider } from './provider'; import { Writer } from './types'; @@ -12,16 +12,8 @@ export class EvmIndexer extends BaseIndexer { this.writers = writers; } - init({ - instance, - log, - abis - }: { - instance: 
Instance; - log: Logger; - abis?: Record; - }) { - const fetcher = new RpcBlockFetcher(instance.config.network_node_url); + async init({ instance, log, abis }: { instance: Instance; log: Logger; abis?: Record }) { + const fetcher = await createBlockFetcher(instance.config, log); this.provider = new EvmProvider({ instance, diff --git a/src/providers/starknet/indexer.ts b/src/providers/starknet/indexer.ts index 12b3c2a..5f8420d 100644 --- a/src/providers/starknet/indexer.ts +++ b/src/providers/starknet/indexer.ts @@ -11,7 +11,7 @@ export class StarknetIndexer extends BaseIndexer { this.writers = writers; } - init({ + async init({ instance, log, abis From 2e1e2063e47f8925bf7e632b74e4a1446b7ab3e2 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 17:59:43 +0800 Subject: [PATCH 06/34] test: add block timestamp cache test for EvmProvider --- test/unit/providers/evm/provider-cache.test.ts | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 test/unit/providers/evm/provider-cache.test.ts diff --git a/test/unit/providers/evm/provider-cache.test.ts b/test/unit/providers/evm/provider-cache.test.ts new file mode 100644 index 0000000..1210d41 --- /dev/null +++ b/test/unit/providers/evm/provider-cache.test.ts @@ -0,0 +1,15 @@ +import { EvmProvider } from '../../../../src/providers/evm/provider'; + +describe('EvmProvider block timestamp cache', () => { + it('should store and retrieve cached timestamps', () => { + const provider = Object.create(EvmProvider.prototype); + provider.blockTimestampCache = new Map(); + + provider.cacheBlockTimestamp(100, 1700000000); + provider.cacheBlockTimestamp(101, 1700000012); + + expect(provider.blockTimestampCache.get(100)).toBe(1700000000); + expect(provider.blockTimestampCache.get(101)).toBe(1700000012); + expect(provider.blockTimestampCache.has(102)).toBe(false); + }); +}); From dce8396dbac54e9f38dbd5c17d81a5dafd959b22 Mon Sep 17 00:00:00 2001 From: wa0x6e 
<495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 18:24:00 +0800 Subject: [PATCH 07/34] feat: cache full block data from HyperSync to skip getBlock RPC calls HyperSync now requests Hash and ParentHash alongside Number and Timestamp. The provider uses this cached block data to skip the eth_getBlockByNumber RPC call entirely for preloaded blocks. --- src/providers/evm/fetchers/hypersync.ts | 22 +++++++--- src/providers/evm/fetchers/types.ts | 2 +- src/providers/evm/provider.ts | 44 +++++++++++-------- .../unit/providers/evm/provider-cache.test.ts | 30 +++++++++---- 4 files changed, 64 insertions(+), 34 deletions(-) diff --git a/src/providers/evm/fetchers/hypersync.ts b/src/providers/evm/fetchers/hypersync.ts index 14687f9..cd8815d 100644 --- a/src/providers/evm/fetchers/hypersync.ts +++ b/src/providers/evm/fetchers/hypersync.ts @@ -12,7 +12,7 @@ import { RpcBlockFetcher } from './rpc'; export class HypersyncBlockFetcher implements BlockFetcher { private readonly hypersync: HypersyncClient; private readonly rpcFetcher: RpcBlockFetcher; - private readonly blockTimestamps = new Map(); + private readonly blockCache = new Map(); constructor({ chainId, @@ -30,8 +30,8 @@ export class HypersyncBlockFetcher implements BlockFetcher { this.rpcFetcher = new RpcBlockFetcher(rpcUrl); } - getBlockTimestamps(): Map { - return this.blockTimestamps; + getCachedBlocks(): Map { + return this.blockCache; } async getChainId(): Promise { @@ -92,15 +92,25 @@ export class HypersyncBlockFetcher implements BlockFetcher { 'Topic3', 'Removed' ], - block: ['Number', 'Timestamp'] + block: ['Number', 'Timestamp', 'Hash', 'ParentHash'] } }; const response = await this.hypersync.collect(query, {}); for (const block of response.data.blocks) { - if (block.number != null && block.timestamp != null) { - this.blockTimestamps.set(block.number, block.timestamp); + if ( + block.number != null && + block.timestamp != null && + block.hash != null && + block.parentHash != null + ) { + 
this.blockCache.set(block.number, { + number: block.number, + hash: block.hash, + parentHash: block.parentHash, + timestamp: block.timestamp + }); } } diff --git a/src/providers/evm/fetchers/types.ts b/src/providers/evm/fetchers/types.ts index beca595..44d24d5 100644 --- a/src/providers/evm/fetchers/types.ts +++ b/src/providers/evm/fetchers/types.ts @@ -27,5 +27,5 @@ export type BlockFetcher = { sources: ContractSourceConfig[], getEventHash: (name: string) => string ): Promise<{ checkpoints: CheckpointRecord[]; logs: Log[] }>; - getBlockTimestamps?(): Map; + getCachedBlocks?(): Map; }; diff --git a/src/providers/evm/provider.ts b/src/providers/evm/provider.ts index d0b9da9..f7ffd01 100644 --- a/src/providers/evm/provider.ts +++ b/src/providers/evm/provider.ts @@ -6,7 +6,7 @@ import { ParseEventLogsReturnType, stringToBytes } from 'viem'; -import { BlockFetcher } from './fetchers/types'; +import { BlockFetcher, FetchedBlock } from './fetchers/types'; import { Block, CustomJsonRpcError, EventsData, Writer } from './types'; import { CheckpointRecord } from '../../stores/checkpoints'; import { ContractSourceConfig } from '../../types'; @@ -18,7 +18,7 @@ export class EvmProvider extends BaseProvider { private readonly writers: Record; private sourceHashes = new Map(); private logsCache = new Map(); - blockTimestampCache = new Map(); + private blockCache = new Map(); constructor({ instance, @@ -64,15 +64,24 @@ export class EvmProvider extends BaseProvider { try { if (!hasPreloadedBlockEvents) { - const fetched = await this.fetcher.getBlock(blockNumber); - const cachedTimestamp = this.blockTimestampCache.get(blockNumber); - const timestamp = cachedTimestamp ?? 
fetched.timestamp; - block = { - number: BigInt(blockNumber), - hash: fetched.hash, - parentHash: fetched.parentHash, - timestamp: BigInt(timestamp) - } as Block; + const cached = this.blockCache.get(blockNumber); + if (cached) { + this.blockCache.delete(blockNumber); + block = { + number: BigInt(cached.number), + hash: cached.hash, + parentHash: cached.parentHash, + timestamp: BigInt(cached.timestamp) + } as Block; + } else { + const fetched = await this.fetcher.getBlock(blockNumber); + block = { + number: BigInt(fetched.number), + hash: fetched.hash, + parentHash: fetched.parentHash, + timestamp: BigInt(fetched.timestamp) + } as Block; + } } } catch (err) { this.log.error({ blockNumber, err }, 'getting block failed... retrying'); @@ -376,11 +385,12 @@ export class EvmProvider extends BaseProvider { this.logsCache.get(log.blockNumber)?.push(log); } - const blockTimestamps = this.fetcher.getBlockTimestamps?.(); - if (blockTimestamps) { - for (const [blockNumber, timestamp] of blockTimestamps) { - this.blockTimestampCache.set(blockNumber, timestamp); + const cachedBlocks = this.fetcher.getCachedBlocks?.(); + if (cachedBlocks) { + for (const [blockNumber, fetchedBlock] of cachedBlocks) { + this.blockCache.set(blockNumber, fetchedBlock); } + cachedBlocks.clear(); } return checkpoints; @@ -394,10 +404,6 @@ export class EvmProvider extends BaseProvider { return this.sourceHashes.get(eventName) as string; } - cacheBlockTimestamp(blockNumber: number, timestamp: number): void { - this.blockTimestampCache.set(blockNumber, timestamp); - } - compareAddress(a: string, b: string) { return a.toLowerCase() === b.toLowerCase(); } diff --git a/test/unit/providers/evm/provider-cache.test.ts b/test/unit/providers/evm/provider-cache.test.ts index 1210d41..53e558c 100644 --- a/test/unit/providers/evm/provider-cache.test.ts +++ b/test/unit/providers/evm/provider-cache.test.ts @@ -1,15 +1,29 @@ import { EvmProvider } from '../../../../src/providers/evm/provider'; +import { FetchedBlock } 
from '../../../../src/providers/evm/fetchers/types'; -describe('EvmProvider block timestamp cache', () => { - it('should store and retrieve cached timestamps', () => { +describe('EvmProvider block cache', () => { + it('should store and retrieve cached blocks', () => { const provider = Object.create(EvmProvider.prototype); - provider.blockTimestampCache = new Map(); + provider.blockCache = new Map(); - provider.cacheBlockTimestamp(100, 1700000000); - provider.cacheBlockTimestamp(101, 1700000012); + const block100: FetchedBlock = { + number: 100, + hash: '0xabc', + parentHash: '0xdef', + timestamp: 1700000000 + }; + const block101: FetchedBlock = { + number: 101, + hash: '0x123', + parentHash: '0xabc', + timestamp: 1700000012 + }; - expect(provider.blockTimestampCache.get(100)).toBe(1700000000); - expect(provider.blockTimestampCache.get(101)).toBe(1700000012); - expect(provider.blockTimestampCache.has(102)).toBe(false); + provider.blockCache.set(100, block100); + provider.blockCache.set(101, block101); + + expect(provider.blockCache.get(100)).toEqual(block100); + expect(provider.blockCache.get(101)).toEqual(block101); + expect(provider.blockCache.has(102)).toBe(false); }); }); From 12623986d15d877635b325f35478bd19fbcb26f5 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 18:53:51 +0800 Subject: [PATCH 08/34] refactor: replace @envio-dev/hypersync-client with plain fetch Remove the native Rust NAPI dependency and use the HyperSync HTTP JSON API directly. No platform-specific binaries needed. 
--- package.json | 1 - src/providers/evm/fetchers/hypersync.ts | 190 ++++++++++++------ .../providers/evm/fetchers/hypersync.test.ts | 8 - yarn.lock | 36 ---- 4 files changed, 127 insertions(+), 108 deletions(-) diff --git a/package.json b/package.json index 0f325cc..7f19b77 100644 --- a/package.json +++ b/package.json @@ -19,7 +19,6 @@ }, "prettier": "@snapshot-labs/prettier-config", "dependencies": { - "@envio-dev/hypersync-client": "^1.3.0", "@graphql-tools/schema": "^8.5.1", "@starknet-io/types-js": "^0.9.2", "connection-string": "^4.3.5", diff --git a/src/providers/evm/fetchers/hypersync.ts b/src/providers/evm/fetchers/hypersync.ts index cd8815d..474e288 100644 --- a/src/providers/evm/fetchers/hypersync.ts +++ b/src/providers/evm/fetchers/hypersync.ts @@ -1,16 +1,61 @@ -import { - HypersyncClient, - type Query, - type Log as HypersyncLog -} from '@envio-dev/hypersync-client'; import { type Log } from 'viem'; import { CheckpointRecord } from '../../../stores/checkpoints'; import { ContractSourceConfig } from '../../../types'; import { BlockFetcher, FetchedBlock } from './types'; import { RpcBlockFetcher } from './rpc'; +type HypersyncLog = { + block_number?: number; + log_index?: number; + transaction_index?: number; + transaction_hash?: string; + block_hash?: string; + address?: string; + data?: string; + topic0?: string | null; + topic1?: string | null; + topic2?: string | null; + topic3?: string | null; + removed?: boolean; +}; + +type HypersyncBlock = { + number?: number; + timestamp?: number; + hash?: string; + parent_hash?: string; +}; + +type HypersyncResponse = { + next_block: number; + archive_height?: number; + data: { + blocks: HypersyncBlock[]; + logs: HypersyncLog[]; + }; +}; + +const FIELD_SELECTION = { + block: ['number', 'timestamp', 'hash', 'parent_hash'], + log: [ + 'block_number', + 'log_index', + 'transaction_index', + 'transaction_hash', + 'block_hash', + 'address', + 'data', + 'topic0', + 'topic1', + 'topic2', + 'topic3', + 'removed' + ] 
+}; + export class HypersyncBlockFetcher implements BlockFetcher { - private readonly hypersync: HypersyncClient; + private readonly url: string; + private readonly apiToken: string; private readonly rpcFetcher: RpcBlockFetcher; private readonly blockCache = new Map(); @@ -23,10 +68,8 @@ export class HypersyncBlockFetcher implements BlockFetcher { apiToken: string; rpcUrl: string; }) { - this.hypersync = new HypersyncClient({ - url: `https://${chainId}.hypersync.xyz`, - apiToken - }); + this.url = `https://${chainId}.hypersync.xyz`; + this.apiToken = apiToken; this.rpcFetcher = new RpcBlockFetcher(rpcUrl); } @@ -39,7 +82,14 @@ export class HypersyncBlockFetcher implements BlockFetcher { } async getLatestBlockNumber(): Promise { - return this.hypersync.getHeight(); + const res = await fetch(`${this.url}/height`); + if (!res.ok) { + throw new Error(`HyperSync height request failed: ${res.statusText}`); + } + + const data = await res.json(); + + return data.height; } async getBlock(blockNumber: number): Promise { @@ -68,53 +118,31 @@ export class HypersyncBlockFetcher implements BlockFetcher { : [topics[0]] : undefined; - const query: Query = { - fromBlock, - toBlock: toBlock + 1, // HyperSync toBlock is exclusive - logs: [ - { - address: addresses, - topics: topic0 ? [topic0] : undefined - } - ], - fieldSelection: { - log: [ - 'BlockNumber', - 'TransactionHash', - 'TransactionIndex', - 'BlockHash', - 'Address', - 'Data', - 'LogIndex', - 'Topic0', - 'Topic1', - 'Topic2', - 'Topic3', - 'Removed' + let allLogs: HypersyncLog[] = []; + let currentFrom = fromBlock; + const exclusiveToBlock = toBlock + 1; + + while (currentFrom < exclusiveToBlock) { + const response = await this.query({ + from_block: currentFrom, + to_block: exclusiveToBlock, + logs: [ + { + address: addresses, + topics: topic0 ? 
[topic0] : undefined + } ], - block: ['Number', 'Timestamp', 'Hash', 'ParentHash'] - } - }; + field_selection: FIELD_SELECTION + }); - const response = await this.hypersync.collect(query, {}); + this.cacheBlocks(response.data.blocks); + allLogs = allLogs.concat(response.data.logs); - for (const block of response.data.blocks) { - if ( - block.number != null && - block.timestamp != null && - block.hash != null && - block.parentHash != null - ) { - this.blockCache.set(block.number, { - number: block.number, - hash: block.hash, - parentHash: block.parentHash, - timestamp: block.timestamp - }); - } + if (response.next_block >= exclusiveToBlock) break; + currentFrom = response.next_block; } - return this.convertHypersyncLogs(response.data.logs); + return this.convertLogs(allLogs); } async getCheckpointsRange( @@ -146,21 +174,57 @@ export class HypersyncBlockFetcher implements BlockFetcher { return { checkpoints, logs: allLogs }; } - private convertHypersyncLogs(hypersyncLogs: HypersyncLog[]): Log[] { + private async query(body: Record): Promise { + const res = await fetch(`${this.url}/query`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${this.apiToken}` + }, + body: JSON.stringify(body) + }); + + if (!res.ok) { + throw new Error(`HyperSync query failed: ${res.statusText}`); + } + + return res.json(); + } + + private cacheBlocks(blocks: HypersyncBlock[]): void { + for (const block of blocks) { + if ( + block.number != null && + block.timestamp != null && + block.hash != null && + block.parent_hash != null + ) { + this.blockCache.set(block.number, { + number: block.number, + hash: block.hash, + parentHash: block.parent_hash, + timestamp: block.timestamp + }); + } + } + } + + private convertLogs(hypersyncLogs: HypersyncLog[]): Log[] { return hypersyncLogs.map(log => { const topics: `0x${string}`[] = []; - for (const topic of log.topics) { - if (topic) topics.push(topic as `0x${string}`); - } + if (log.topic0) 
topics.push(log.topic0 as `0x${string}`); + if (log.topic1) topics.push(log.topic1 as `0x${string}`); + if (log.topic2) topics.push(log.topic2 as `0x${string}`); + if (log.topic3) topics.push(log.topic3 as `0x${string}`); return { - address: log.address as `0x${string}`, - blockHash: log.blockHash as `0x${string}`, - blockNumber: log.blockNumber != null ? BigInt(log.blockNumber) : null, + address: (log.address ?? '0x') as `0x${string}`, + blockHash: (log.block_hash ?? null) as `0x${string}` | null, + blockNumber: log.block_number != null ? BigInt(log.block_number) : null, data: (log.data ?? '0x') as `0x${string}`, - logIndex: log.logIndex ?? 0, - transactionHash: log.transactionHash as `0x${string}`, - transactionIndex: log.transactionIndex ?? 0, + logIndex: log.log_index ?? 0, + transactionHash: (log.transaction_hash ?? null) as `0x${string}` | null, + transactionIndex: log.transaction_index ?? 0, removed: log.removed ?? false, topics } as Log; diff --git a/test/unit/providers/evm/fetchers/hypersync.test.ts b/test/unit/providers/evm/fetchers/hypersync.test.ts index 2dfde75..297441f 100644 --- a/test/unit/providers/evm/fetchers/hypersync.test.ts +++ b/test/unit/providers/evm/fetchers/hypersync.test.ts @@ -1,13 +1,5 @@ import { HypersyncBlockFetcher } from '../../../../../src/providers/evm/fetchers/hypersync'; -jest.mock('@envio-dev/hypersync-client', () => ({ - HypersyncClient: jest.fn().mockImplementation(() => ({ - getHeight: jest.fn(), - getChainId: jest.fn(), - collect: jest.fn() - })) -})); - describe('HypersyncBlockFetcher', () => { it('should be instantiated with chainId, apiToken, and rpcUrl', () => { const fetcher = new HypersyncBlockFetcher({ diff --git a/yarn.lock b/yarn.lock index 7cdf01d..d887ec2 100644 --- a/yarn.lock +++ b/yarn.lock @@ -499,42 +499,6 @@ dependencies: "@jridgewell/trace-mapping" "0.3.9" -"@envio-dev/hypersync-client-darwin-arm64@1.3.0": - version "1.3.0" - resolved 
"https://registry.yarnpkg.com/@envio-dev/hypersync-client-darwin-arm64/-/hypersync-client-darwin-arm64-1.3.0.tgz#570864a069d9d0aab5e0f079c9312f4760430287" - integrity sha512-JZwiVRbMSuJnKsVUpfjTHc3YgAMvGlyuqWQxVc7Eok4Xp/sZLUCXRQUykbCh6fOUWRmoa2JG/ykP/NotoTRCBg== - -"@envio-dev/hypersync-client-darwin-x64@1.3.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@envio-dev/hypersync-client-darwin-x64/-/hypersync-client-darwin-x64-1.3.0.tgz#b2ff4c0f051462526380752a790ad594355cec24" - integrity sha512-2eSzQqqqFBMK2enVucYGcny5Ep4DEKYxf3Xme7z9qp2d3c6fMcbVvM4Gt8KOzb7ySjwJ2gU+qY2h545T2NiJXQ== - -"@envio-dev/hypersync-client-linux-arm64-gnu@1.3.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@envio-dev/hypersync-client-linux-arm64-gnu/-/hypersync-client-linux-arm64-gnu-1.3.0.tgz#6faae143bceb44693490ac51234164b67a97f75b" - integrity sha512-gsjMp3WKekwnA89HvJXvcTM3BE5wVFG/qTF4rmk3rGiXhZ+MGaZQKrYRAhnzQZblueFtF/xnnBYpO35Z3ZFThg== - -"@envio-dev/hypersync-client-linux-x64-gnu@1.3.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@envio-dev/hypersync-client-linux-x64-gnu/-/hypersync-client-linux-x64-gnu-1.3.0.tgz#ed44ccaa118a5cb5a1f59a62812fb63e2c3324d1" - integrity sha512-Lkvi4lRVwCyFOXf9LYH2X91zmW2l1vbfojKhTwKgqFWv6PMN5atlYjt+/NcUCAAhk5EUavWGjoikwnvLp870cg== - -"@envio-dev/hypersync-client-linux-x64-musl@1.3.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@envio-dev/hypersync-client-linux-x64-musl/-/hypersync-client-linux-x64-musl-1.3.0.tgz#b7c59244376956d64c7cdb2978315ae3d0c1b84e" - integrity sha512-UIjB/gUX2sl23EMXLBxqtkgMnOjNSiaHK+CSU5vXMXkzL3fOGbz24bvyaPsSv82cxCFEE0yTwlSKkCX6/L8o6Q== - -"@envio-dev/hypersync-client@^1.3.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@envio-dev/hypersync-client/-/hypersync-client-1.3.0.tgz#dd0f5dbf38144dcceb78b3d89760156ef7caf509" - integrity sha512-wUdfZzbsFPbGq6n/1mmUMsWuiAil+m+fL/GBX5LGUyMJV86TXy2SBtAqYYNyDxWLO6gvGr6PYKrP8pLVAUZDZg== - optionalDependencies: - 
"@envio-dev/hypersync-client-darwin-arm64" "1.3.0" - "@envio-dev/hypersync-client-darwin-x64" "1.3.0" - "@envio-dev/hypersync-client-linux-arm64-gnu" "1.3.0" - "@envio-dev/hypersync-client-linux-x64-gnu" "1.3.0" - "@envio-dev/hypersync-client-linux-x64-musl" "1.3.0" - "@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.7.0": version "4.7.0" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz#607084630c6c033992a082de6e6fbc1a8b52175a" From 576dfa30c637630a8a08e0bd05d7c194df1a6574 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 19:02:05 +0800 Subject: [PATCH 09/34] refactor: re-export FetchedBlock from evm/types --- src/providers/evm/index.ts | 1 - src/providers/evm/types.ts | 2 ++ 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/providers/evm/index.ts b/src/providers/evm/index.ts index a1f98c0..e5c759a 100644 --- a/src/providers/evm/index.ts +++ b/src/providers/evm/index.ts @@ -1,4 +1,3 @@ export { EvmProvider } from './provider'; export { EvmIndexer } from './indexer'; export * from './types'; -export type { BlockFetcher, FetchedBlock } from './fetchers/types'; diff --git a/src/providers/evm/types.ts b/src/providers/evm/types.ts index 9fb7ac7..95688b6 100644 --- a/src/providers/evm/types.ts +++ b/src/providers/evm/types.ts @@ -7,6 +7,8 @@ import { } from 'viem'; import { BaseWriterParams } from '../../types'; +export { BlockFetcher, FetchedBlock } from './fetchers/types'; + export class CustomJsonRpcError extends Error { constructor( message: string, From 30bdfa58ac0bfac52779a75a5a7cc14bb97fcd39 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 19:23:49 +0800 Subject: [PATCH 10/34] refactor: lazy chainId resolution, revert async init Resolve HyperSync chainId lazily on first API call instead of at init time. 
This keeps init() synchronous, avoids changes to BaseIndexer, StarknetIndexer, and Container constructor. --- src/container.ts | 12 +++++----- src/providers/base.ts | 4 ++-- src/providers/evm/fetchers/hypersync.ts | 22 +++++++++++++------ src/providers/evm/fetchers/index.ts | 18 +++++---------- src/providers/evm/indexer.ts | 12 ++++++++-- src/providers/starknet/indexer.ts | 2 +- .../providers/evm/fetchers/hypersync.test.ts | 3 +-- 7 files changed, 41 insertions(+), 32 deletions(-) diff --git a/src/container.ts b/src/container.ts index 58e7954..a663c30 100644 --- a/src/container.ts +++ b/src/container.ts @@ -76,6 +76,12 @@ export class Container implements Instance { this.indexer = indexer; this.schema = schema; this.opts = opts; + + this.indexer.init({ + instance: this, + log: this.log, + abis: config.abis + }); } public get sourceContracts() { @@ -203,12 +209,6 @@ export class Container implements Instance { * */ public async start() { - await this.indexer.init({ - instance: this, - log: this.log, - abis: this.config.abis - }); - await this.validateStore(); const templateSources = await this.store.getTemplateSources( diff --git a/src/providers/base.ts b/src/providers/base.ts index 6fa95b4..0f47128 100644 --- a/src/providers/base.ts +++ b/src/providers/base.ts @@ -103,7 +103,7 @@ export class BaseProvider { export class BaseIndexer { protected provider?: BaseProvider; - async init({ + init({ /* eslint-disable @typescript-eslint/no-unused-vars */ instance, log, @@ -113,7 +113,7 @@ export class BaseIndexer { instance: Instance; log: Logger; abis?: Record; - }): Promise { + }) { throw new Error('init method was not defined'); } diff --git a/src/providers/evm/fetchers/hypersync.ts b/src/providers/evm/fetchers/hypersync.ts index 474e288..b45984c 100644 --- a/src/providers/evm/fetchers/hypersync.ts +++ b/src/providers/evm/fetchers/hypersync.ts @@ -1,4 +1,4 @@ -import { type Log } from 'viem'; +import { Log } from 'viem'; import { CheckpointRecord } from 
'../../../stores/checkpoints'; import { ContractSourceConfig } from '../../../types'; import { BlockFetcher, FetchedBlock } from './types'; @@ -54,25 +54,31 @@ const FIELD_SELECTION = { }; export class HypersyncBlockFetcher implements BlockFetcher { - private readonly url: string; + private url: string | null = null; private readonly apiToken: string; private readonly rpcFetcher: RpcBlockFetcher; private readonly blockCache = new Map(); constructor({ - chainId, apiToken, rpcUrl }: { - chainId: number; apiToken: string; rpcUrl: string; }) { - this.url = `https://${chainId}.hypersync.xyz`; this.apiToken = apiToken; this.rpcFetcher = new RpcBlockFetcher(rpcUrl); } + private async getUrl(): Promise { + if (!this.url) { + const chainId = await this.rpcFetcher.getChainId(); + this.url = `https://${chainId}.hypersync.xyz`; + } + + return this.url; + } + getCachedBlocks(): Map { return this.blockCache; } @@ -82,7 +88,8 @@ export class HypersyncBlockFetcher implements BlockFetcher { } async getLatestBlockNumber(): Promise { - const res = await fetch(`${this.url}/height`); + const url = await this.getUrl(); + const res = await fetch(`${url}/height`); if (!res.ok) { throw new Error(`HyperSync height request failed: ${res.statusText}`); } @@ -175,7 +182,8 @@ export class HypersyncBlockFetcher implements BlockFetcher { } private async query(body: Record): Promise { - const res = await fetch(`${this.url}/query`, { + const url = await this.getUrl(); + const res = await fetch(`${url}/query`, { method: 'POST', headers: { 'Content-Type': 'application/json', diff --git a/src/providers/evm/fetchers/index.ts b/src/providers/evm/fetchers/index.ts index 3dbcf22..b7478f8 100644 --- a/src/providers/evm/fetchers/index.ts +++ b/src/providers/evm/fetchers/index.ts @@ -1,25 +1,19 @@ -export type { BlockFetcher, FetchedBlock } from './types'; -export { RpcBlockFetcher } from './rpc'; -export { HypersyncBlockFetcher } from './hypersync'; - import { CheckpointConfig } from '../../../types'; 
import { Logger } from '../../../utils/logger'; -import { BlockFetcher } from './types'; +import { BlockFetcher, FetchedBlock } from './types'; import { RpcBlockFetcher } from './rpc'; import { HypersyncBlockFetcher } from './hypersync'; -export async function createBlockFetcher( +export { BlockFetcher, FetchedBlock, RpcBlockFetcher, HypersyncBlockFetcher }; + +export function createBlockFetcher( config: CheckpointConfig, log: Logger -): Promise { +): BlockFetcher { if (config.hypersync_api_token) { - const rpcFetcher = new RpcBlockFetcher(config.network_node_url); - const chainId = await rpcFetcher.getChainId(); - - log.info({ chainId }, 'using HyperSync block fetcher'); + log.info('using HyperSync block fetcher'); return new HypersyncBlockFetcher({ - chainId, apiToken: config.hypersync_api_token, rpcUrl: config.network_node_url }); diff --git a/src/providers/evm/indexer.ts b/src/providers/evm/indexer.ts index 5d43769..03138d6 100644 --- a/src/providers/evm/indexer.ts +++ b/src/providers/evm/indexer.ts @@ -12,8 +12,16 @@ export class EvmIndexer extends BaseIndexer { this.writers = writers; } - async init({ instance, log, abis }: { instance: Instance; log: Logger; abis?: Record }) { - const fetcher = await createBlockFetcher(instance.config, log); + init({ + instance, + log, + abis + }: { + instance: Instance; + log: Logger; + abis?: Record; + }) { + const fetcher = createBlockFetcher(instance.config, log); this.provider = new EvmProvider({ instance, diff --git a/src/providers/starknet/indexer.ts b/src/providers/starknet/indexer.ts index 5f8420d..12b3c2a 100644 --- a/src/providers/starknet/indexer.ts +++ b/src/providers/starknet/indexer.ts @@ -11,7 +11,7 @@ export class StarknetIndexer extends BaseIndexer { this.writers = writers; } - async init({ + init({ instance, log, abis diff --git a/test/unit/providers/evm/fetchers/hypersync.test.ts b/test/unit/providers/evm/fetchers/hypersync.test.ts index 297441f..f4dec79 100644 --- 
a/test/unit/providers/evm/fetchers/hypersync.test.ts +++ b/test/unit/providers/evm/fetchers/hypersync.test.ts @@ -1,9 +1,8 @@ import { HypersyncBlockFetcher } from '../../../../../src/providers/evm/fetchers/hypersync'; describe('HypersyncBlockFetcher', () => { - it('should be instantiated with chainId, apiToken, and rpcUrl', () => { + it('should be instantiated with apiToken and rpcUrl', () => { const fetcher = new HypersyncBlockFetcher({ - chainId: 1, apiToken: 'test-token', rpcUrl: 'https://rpc.example.com' }); From 487ee577845170504755474a42cfadd5824bb188 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 19:41:04 +0800 Subject: [PATCH 11/34] refactor: separate HyperSync into preloader-only role HyperSync is now a Preloader, not a BlockFetcher. The provider always uses RpcBlockFetcher for live blocks. HypersyncPreloader is only used during getCheckpointsRange for bulk historical event fetching with block data caching. --- src/providers/evm/fetchers/hypersync.ts | 150 +++++++++--------- src/providers/evm/fetchers/index.ts | 22 +-- src/providers/evm/fetchers/types.ts | 14 +- src/providers/evm/indexer.ts | 7 +- src/providers/evm/provider.ts | 51 ++++-- src/providers/evm/types.ts | 2 +- .../providers/evm/fetchers/hypersync.test.ts | 8 +- 7 files changed, 140 insertions(+), 114 deletions(-) diff --git a/src/providers/evm/fetchers/hypersync.ts b/src/providers/evm/fetchers/hypersync.ts index b45984c..cf8e8b7 100644 --- a/src/providers/evm/fetchers/hypersync.ts +++ b/src/providers/evm/fetchers/hypersync.ts @@ -1,8 +1,7 @@ import { Log } from 'viem'; import { CheckpointRecord } from '../../../stores/checkpoints'; import { ContractSourceConfig } from '../../../types'; -import { BlockFetcher, FetchedBlock } from './types'; -import { RpcBlockFetcher } from './rpc'; +import { FetchedBlock, Preloader } from './types'; type HypersyncLog = { block_number?: number; @@ -53,71 +52,84 @@ const FIELD_SELECTION = { ] }; -export class 
HypersyncBlockFetcher implements BlockFetcher { +export class HypersyncPreloader implements Preloader { private url: string | null = null; private readonly apiToken: string; - private readonly rpcFetcher: RpcBlockFetcher; - private readonly blockCache = new Map(); - - constructor({ - apiToken, - rpcUrl - }: { - apiToken: string; - rpcUrl: string; - }) { + private readonly rpcUrl: string; + + constructor({ apiToken, rpcUrl }: { apiToken: string; rpcUrl: string }) { this.apiToken = apiToken; - this.rpcFetcher = new RpcBlockFetcher(rpcUrl); + this.rpcUrl = rpcUrl; } private async getUrl(): Promise { if (!this.url) { - const chainId = await this.rpcFetcher.getChainId(); + const res = await fetch(this.rpcUrl, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + jsonrpc: '2.0', + id: 1, + method: 'eth_chainId', + params: [] + }) + }); + const json = await res.json(); + const chainId = parseInt(json.result, 16); this.url = `https://${chainId}.hypersync.xyz`; } return this.url; } - getCachedBlocks(): Map { - return this.blockCache; - } - - async getChainId(): Promise { - return this.rpcFetcher.getChainId(); - } - - async getLatestBlockNumber(): Promise { - const url = await this.getUrl(); - const res = await fetch(`${url}/height`); - if (!res.ok) { - throw new Error(`HyperSync height request failed: ${res.statusText}`); + async getCheckpointsRange( + fromBlock: number, + toBlock: number, + sources: ContractSourceConfig[], + getEventHash: (name: string) => string + ): Promise<{ + checkpoints: CheckpointRecord[]; + logs: Log[]; + blocks: FetchedBlock[]; + }> { + const chunks: ContractSourceConfig[][] = []; + for (let i = 0; i < sources.length; i += 20) { + chunks.push(sources.slice(i, i + 20)); } - const data = await res.json(); + let allLogs: Log[] = []; + const allBlocks: FetchedBlock[] = []; - return data.height; - } + for (const chunk of chunks) { + const addresses = chunk.map(source => source.contract); + const topics = 
chunk.flatMap(source => + source.events.map(event => getEventHash(event.name)) + ); - async getBlock(blockNumber: number): Promise { - return this.rpcFetcher.getBlock(blockNumber); - } + const { logs, blocks } = await this.fetchLogs( + fromBlock, + toBlock, + addresses, + [topics] + ); + allLogs = allLogs.concat(logs); + allBlocks.push(...blocks); + } - async getBlockHash(blockNumber: number): Promise { - return this.rpcFetcher.getBlockHash(blockNumber); - } + const checkpoints = allLogs.map(log => ({ + blockNumber: Number(log.blockNumber), + contractAddress: log.address + })); - async getLogsByBlockHash(blockHash: string): Promise { - return this.rpcFetcher.getLogsByBlockHash(blockHash); + return { checkpoints, logs: allLogs, blocks: allBlocks }; } - async getLogs( + private async fetchLogs( fromBlock: number, toBlock: number, - address: string | string[], - topics: (string | string[])[] = [] - ): Promise { - const addresses = Array.isArray(address) ? address : [address]; + addresses: string[], + topics: (string | string[])[] + ): Promise<{ logs: Log[]; blocks: FetchedBlock[] }> { const topic0 = topics.length > 0 ? 
Array.isArray(topics[0]) @@ -126,6 +138,7 @@ export class HypersyncBlockFetcher implements BlockFetcher { : undefined; let allLogs: HypersyncLog[] = []; + const allBlocks: FetchedBlock[] = []; let currentFrom = fromBlock; const exclusiveToBlock = toBlock + 1; @@ -142,46 +155,19 @@ export class HypersyncBlockFetcher implements BlockFetcher { field_selection: FIELD_SELECTION }); - this.cacheBlocks(response.data.blocks); + allBlocks.push(...this.convertBlocks(response.data.blocks)); allLogs = allLogs.concat(response.data.logs); if (response.next_block >= exclusiveToBlock) break; currentFrom = response.next_block; } - return this.convertLogs(allLogs); - } - - async getCheckpointsRange( - fromBlock: number, - toBlock: number, - sources: ContractSourceConfig[], - getEventHash: (name: string) => string - ): Promise<{ checkpoints: CheckpointRecord[]; logs: Log[] }> { - const chunks: ContractSourceConfig[][] = []; - for (let i = 0; i < sources.length; i += 20) { - chunks.push(sources.slice(i, i + 20)); - } - - let allLogs: Log[] = []; - for (const chunk of chunks) { - const addresses = chunk.map(source => source.contract); - const topics = chunk.flatMap(source => - source.events.map(event => getEventHash(event.name)) - ); - const logs = await this.getLogs(fromBlock, toBlock, addresses, [topics]); - allLogs = allLogs.concat(logs); - } - - const checkpoints = allLogs.map(log => ({ - blockNumber: Number(log.blockNumber), - contractAddress: log.address - })); - - return { checkpoints, logs: allLogs }; + return { logs: this.convertLogs(allLogs), blocks: allBlocks }; } - private async query(body: Record): Promise { + private async query( + body: Record + ): Promise { const url = await this.getUrl(); const res = await fetch(`${url}/query`, { method: 'POST', @@ -199,7 +185,9 @@ export class HypersyncBlockFetcher implements BlockFetcher { return res.json(); } - private cacheBlocks(blocks: HypersyncBlock[]): void { + private convertBlocks(blocks: HypersyncBlock[]): FetchedBlock[] { + 
const result: FetchedBlock[] = []; + for (const block of blocks) { if ( block.number != null && @@ -207,7 +195,7 @@ export class HypersyncBlockFetcher implements BlockFetcher { block.hash != null && block.parent_hash != null ) { - this.blockCache.set(block.number, { + result.push({ number: block.number, hash: block.hash, parentHash: block.parent_hash, @@ -215,6 +203,8 @@ export class HypersyncBlockFetcher implements BlockFetcher { }); } } + + return result; } private convertLogs(hypersyncLogs: HypersyncLog[]): Log[] { @@ -231,7 +221,9 @@ export class HypersyncBlockFetcher implements BlockFetcher { blockNumber: log.block_number != null ? BigInt(log.block_number) : null, data: (log.data ?? '0x') as `0x${string}`, logIndex: log.log_index ?? 0, - transactionHash: (log.transaction_hash ?? null) as `0x${string}` | null, + transactionHash: (log.transaction_hash ?? null) as + | `0x${string}` + | null, transactionIndex: log.transaction_index ?? 0, removed: log.removed ?? false, topics diff --git a/src/providers/evm/fetchers/index.ts b/src/providers/evm/fetchers/index.ts index b7478f8..16e8568 100644 --- a/src/providers/evm/fetchers/index.ts +++ b/src/providers/evm/fetchers/index.ts @@ -1,25 +1,27 @@ import { CheckpointConfig } from '../../../types'; import { Logger } from '../../../utils/logger'; -import { BlockFetcher, FetchedBlock } from './types'; +import { BlockFetcher, FetchedBlock, Preloader } from './types'; import { RpcBlockFetcher } from './rpc'; -import { HypersyncBlockFetcher } from './hypersync'; +import { HypersyncPreloader } from './hypersync'; -export { BlockFetcher, FetchedBlock, RpcBlockFetcher, HypersyncBlockFetcher }; +export { BlockFetcher, FetchedBlock, Preloader, RpcBlockFetcher, HypersyncPreloader }; -export function createBlockFetcher( +export function createFetchers( config: CheckpointConfig, log: Logger -): BlockFetcher { +): { fetcher: BlockFetcher; preloader?: Preloader } { + const fetcher = new RpcBlockFetcher(config.network_node_url); + if 
(config.hypersync_api_token) { - log.info('using HyperSync block fetcher'); + log.info('using HyperSync preloader'); - return new HypersyncBlockFetcher({ + const preloader = new HypersyncPreloader({ apiToken: config.hypersync_api_token, rpcUrl: config.network_node_url }); - } - log.info('using RPC block fetcher'); + return { fetcher, preloader }; + } - return new RpcBlockFetcher(config.network_node_url); + return { fetcher }; } diff --git a/src/providers/evm/fetchers/types.ts b/src/providers/evm/fetchers/types.ts index 44d24d5..3f03e63 100644 --- a/src/providers/evm/fetchers/types.ts +++ b/src/providers/evm/fetchers/types.ts @@ -27,5 +27,17 @@ export type BlockFetcher = { sources: ContractSourceConfig[], getEventHash: (name: string) => string ): Promise<{ checkpoints: CheckpointRecord[]; logs: Log[] }>; - getCachedBlocks?(): Map; +}; + +export type Preloader = { + getCheckpointsRange( + fromBlock: number, + toBlock: number, + sources: ContractSourceConfig[], + getEventHash: (name: string) => string + ): Promise<{ + checkpoints: CheckpointRecord[]; + logs: Log[]; + blocks: FetchedBlock[]; + }>; }; diff --git a/src/providers/evm/indexer.ts b/src/providers/evm/indexer.ts index 03138d6..406a2cd 100644 --- a/src/providers/evm/indexer.ts +++ b/src/providers/evm/indexer.ts @@ -1,6 +1,6 @@ import { Logger } from '../../utils/logger'; import { BaseIndexer, Instance } from '../base'; -import { createBlockFetcher } from './fetchers'; +import { createFetchers } from './fetchers'; import { EvmProvider } from './provider'; import { Writer } from './types'; @@ -21,14 +21,15 @@ export class EvmIndexer extends BaseIndexer { log: Logger; abis?: Record; }) { - const fetcher = createBlockFetcher(instance.config, log); + const { fetcher, preloader } = createFetchers(instance.config, log); this.provider = new EvmProvider({ instance, log, abis, writers: this.writers, - fetcher + fetcher, + preloader }); } diff --git a/src/providers/evm/provider.ts b/src/providers/evm/provider.ts index 
f7ffd01..e83b151 100644 --- a/src/providers/evm/provider.ts +++ b/src/providers/evm/provider.ts @@ -6,7 +6,7 @@ import { ParseEventLogsReturnType, stringToBytes } from 'viem'; -import { BlockFetcher, FetchedBlock } from './fetchers/types'; +import { BlockFetcher, FetchedBlock, Preloader } from './fetchers/types'; import { Block, CustomJsonRpcError, EventsData, Writer } from './types'; import { CheckpointRecord } from '../../stores/checkpoints'; import { ContractSourceConfig } from '../../types'; @@ -14,6 +14,7 @@ import { BaseProvider, BlockNotFoundError, ReorgDetectedError } from '../base'; export class EvmProvider extends BaseProvider { private readonly fetcher: BlockFetcher; + private readonly preloader?: Preloader; private readonly writers: Record; private sourceHashes = new Map(); @@ -25,14 +26,17 @@ export class EvmProvider extends BaseProvider { log, abis, writers, - fetcher + fetcher, + preloader }: ConstructorParameters[0] & { writers: Record; fetcher: BlockFetcher; + preloader?: Preloader; }) { super({ instance, log, abis }); this.fetcher = fetcher; + this.preloader = preloader; this.writers = writers; } @@ -368,12 +372,35 @@ export class EvmProvider extends BaseProvider { fromBlock: number, toBlock: number ): Promise { - const { checkpoints, logs } = await this.fetcher.getCheckpointsRange( - fromBlock, - toBlock, - this.instance.getCurrentSources(toBlock), - name => this.getEventHash(name) - ); + const sources = this.instance.getCurrentSources(toBlock); + const getEventHash = (name: string) => this.getEventHash(name); + + let checkpoints: CheckpointRecord[]; + let logs: Log[]; + + if (this.preloader) { + const result = await this.preloader.getCheckpointsRange( + fromBlock, + toBlock, + sources, + getEventHash + ); + checkpoints = result.checkpoints; + logs = result.logs; + + for (const block of result.blocks) { + this.blockCache.set(block.number, block); + } + } else { + const result = await this.fetcher.getCheckpointsRange( + fromBlock, + toBlock, + 
sources, + getEventHash + ); + checkpoints = result.checkpoints; + logs = result.logs; + } for (const log of logs) { if (log.blockNumber === null) continue; @@ -385,14 +412,6 @@ export class EvmProvider extends BaseProvider { this.logsCache.get(log.blockNumber)?.push(log); } - const cachedBlocks = this.fetcher.getCachedBlocks?.(); - if (cachedBlocks) { - for (const [blockNumber, fetchedBlock] of cachedBlocks) { - this.blockCache.set(blockNumber, fetchedBlock); - } - cachedBlocks.clear(); - } - return checkpoints; } diff --git a/src/providers/evm/types.ts b/src/providers/evm/types.ts index 95688b6..f0cfd72 100644 --- a/src/providers/evm/types.ts +++ b/src/providers/evm/types.ts @@ -7,7 +7,7 @@ import { } from 'viem'; import { BaseWriterParams } from '../../types'; -export { BlockFetcher, FetchedBlock } from './fetchers/types'; +export { BlockFetcher, FetchedBlock, Preloader } from './fetchers/types'; export class CustomJsonRpcError extends Error { constructor( diff --git a/test/unit/providers/evm/fetchers/hypersync.test.ts b/test/unit/providers/evm/fetchers/hypersync.test.ts index f4dec79..ac56575 100644 --- a/test/unit/providers/evm/fetchers/hypersync.test.ts +++ b/test/unit/providers/evm/fetchers/hypersync.test.ts @@ -1,11 +1,11 @@ -import { HypersyncBlockFetcher } from '../../../../../src/providers/evm/fetchers/hypersync'; +import { HypersyncPreloader } from '../../../../../src/providers/evm/fetchers/hypersync'; -describe('HypersyncBlockFetcher', () => { +describe('HypersyncPreloader', () => { it('should be instantiated with apiToken and rpcUrl', () => { - const fetcher = new HypersyncBlockFetcher({ + const preloader = new HypersyncPreloader({ apiToken: 'test-token', rpcUrl: 'https://rpc.example.com' }); - expect(fetcher).toBeDefined(); + expect(preloader).toBeDefined(); }); }); From b6bfdc1f37bd5e9b93b76b8686431f8116d9e042 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 22:27:21 +0800 Subject: [PATCH 12/34] 
refactor: remove BlockFetcher abstraction, restore viem client on provider MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit HyperSync only needs to replace preloading. Remove BlockFetcher interface and RpcBlockFetcher — restore the original viem PublicClient on EvmProvider for all live block operations. Only the Preloader interface remains for optional HyperSync bulk fetching. --- src/providers/evm/fetchers/index.ts | 17 +- src/providers/evm/fetchers/rpc.ts | 230 ------------------- src/providers/evm/fetchers/types.ts | 20 -- src/providers/evm/indexer.ts | 7 +- src/providers/evm/provider.ts | 218 +++++++++++++++--- src/providers/evm/types.ts | 2 +- test/unit/providers/evm/fetchers/rpc.test.ts | 8 - 7 files changed, 197 insertions(+), 305 deletions(-) delete mode 100644 src/providers/evm/fetchers/rpc.ts delete mode 100644 test/unit/providers/evm/fetchers/rpc.test.ts diff --git a/src/providers/evm/fetchers/index.ts b/src/providers/evm/fetchers/index.ts index 16e8568..5f63d83 100644 --- a/src/providers/evm/fetchers/index.ts +++ b/src/providers/evm/fetchers/index.ts @@ -1,27 +1,22 @@ import { CheckpointConfig } from '../../../types'; import { Logger } from '../../../utils/logger'; -import { BlockFetcher, FetchedBlock, Preloader } from './types'; -import { RpcBlockFetcher } from './rpc'; +import { FetchedBlock, Preloader } from './types'; import { HypersyncPreloader } from './hypersync'; -export { BlockFetcher, FetchedBlock, Preloader, RpcBlockFetcher, HypersyncPreloader }; +export { FetchedBlock, Preloader, HypersyncPreloader }; -export function createFetchers( +export function createPreloader( config: CheckpointConfig, log: Logger -): { fetcher: BlockFetcher; preloader?: Preloader } { - const fetcher = new RpcBlockFetcher(config.network_node_url); - +): Preloader | undefined { if (config.hypersync_api_token) { log.info('using HyperSync preloader'); - const preloader = new HypersyncPreloader({ + return new HypersyncPreloader({ 
apiToken: config.hypersync_api_token, rpcUrl: config.network_node_url }); - - return { fetcher, preloader }; } - return { fetcher }; + return undefined; } diff --git a/src/providers/evm/fetchers/rpc.ts b/src/providers/evm/fetchers/rpc.ts deleted file mode 100644 index d56f5d0..0000000 --- a/src/providers/evm/fetchers/rpc.ts +++ /dev/null @@ -1,230 +0,0 @@ -import { - createPublicClient, - formatLog, - http, - Log, - PublicClient, - RpcLog -} from 'viem'; -import { BlockFetcher, FetchedBlock } from './types'; -import { getRangeHint } from '../helpers'; -import { CustomJsonRpcError } from '../types'; -import { CheckpointRecord } from '../../../stores/checkpoints'; -import { ContractSourceConfig } from '../../../types'; -import { sleep } from '../../../utils/helpers'; - -type GetLogsBlockHashFilter = { - blockHash: string; -}; - -type GetLogsBlockRangeFilter = { - fromBlock: number; - toBlock: number; -}; - -/** - * Timeout for client requests in milliseconds. - * This timeout is also used when fetching latest blocks in getLogs. 
- */ -const CLIENT_TIMEOUT = 5 * 1000; - -const MAX_BLOCKS_PER_REQUEST = 10000; - -export class RpcBlockFetcher implements BlockFetcher { - private readonly client: PublicClient; - private readonly networkNodeUrl: string; - - constructor(networkNodeUrl: string) { - this.networkNodeUrl = networkNodeUrl; - this.client = createPublicClient({ - transport: http(networkNodeUrl, { - timeout: CLIENT_TIMEOUT - }) - }); - } - - async getChainId(): Promise { - return this.client.getChainId(); - } - - async getLatestBlockNumber(): Promise { - const blockNumber = await this.client.getBlockNumber(); - - return Number(blockNumber); - } - - async getBlock(blockNumber: number): Promise { - const block = await this.client.getBlock({ - blockNumber: BigInt(blockNumber) - }); - - return { - number: Number(block.number), - hash: block.hash, - parentHash: block.parentHash, - timestamp: Number(block.timestamp) - }; - } - - async getBlockHash(blockNumber: number): Promise { - const block = await this.client.getBlock({ - blockNumber: BigInt(blockNumber) - }); - - return block.hash; - } - - /** - * This method is a simpler implementation of getLogs method. - * This allows using two filters that are not supported in ethers v5: - * - `blockHash` to get logs for a specific block - if node doesn't know about that block it will fail. - * - `address` as a single address or an array of addresses. 
- * @param filter Logs filter - */ - private async _getLogs( - filter: (GetLogsBlockHashFilter | GetLogsBlockRangeFilter) & { - address?: string | string[]; - topics?: (string | string[])[]; - } - ): Promise { - const params: { - fromBlock?: string; - toBlock?: string; - blockHash?: string; - address?: string | string[]; - topics?: (string | string[])[]; - } = {}; - - let signal: AbortSignal | undefined; - - if ('blockHash' in filter) { - signal = AbortSignal.timeout(CLIENT_TIMEOUT); - params.blockHash = filter.blockHash; - } - - if ('fromBlock' in filter) { - params.fromBlock = `0x${filter.fromBlock.toString(16)}`; - } - - if ('toBlock' in filter) { - params.toBlock = `0x${filter.toBlock.toString(16)}`; - } - - if ('address' in filter) { - params.address = filter.address; - } - - if ('topics' in filter) { - params.topics = filter.topics; - } - - const res = await fetch(this.networkNodeUrl, { - method: 'POST', - signal, - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify({ - jsonrpc: '2.0', - id: 1, - method: 'eth_getLogs', - params: [params] - }) - }); - - if (!res.ok) { - throw new Error(`Request failed: ${res.statusText}`); - } - - const json = await res.json(); - - if (json.error) { - throw new CustomJsonRpcError( - json.error.message, - json.error.code, - json.error.data - ); - } - - return json.result.map((log: RpcLog) => formatLog(log)); - } - - async getLogsByBlockHash(blockHash: string): Promise { - return this._getLogs({ blockHash }); - } - - async getLogs( - fromBlock: number, - toBlock: number, - address: string | string[], - topics: (string | string[])[] = [] - ): Promise { - let result = [] as Log[]; - - let currentFrom = fromBlock; - let currentTo = Math.min(toBlock, currentFrom + MAX_BLOCKS_PER_REQUEST); - while (true) { - try { - const logs = await this._getLogs({ - fromBlock: currentFrom, - toBlock: currentTo, - address, - topics - }); - - result = result.concat(logs); - - if (currentTo === toBlock) break; - currentFrom = 
currentTo + 1; - currentTo = Math.min(toBlock, currentFrom + MAX_BLOCKS_PER_REQUEST); - } catch (err: unknown) { - const rangeHint = getRangeHint(err, { - from: currentFrom, - to: currentTo - }); - - if (rangeHint) { - currentFrom = rangeHint.from; - currentTo = rangeHint.to; - continue; - } - - await sleep(5000); - } - } - - return result; - } - - async getCheckpointsRange( - fromBlock: number, - toBlock: number, - sources: ContractSourceConfig[], - getEventHash: (name: string) => string - ): Promise<{ checkpoints: CheckpointRecord[]; logs: Log[] }> { - const chunks: ContractSourceConfig[][] = []; - for (let i = 0; i < sources.length; i += 20) { - chunks.push(sources.slice(i, i + 20)); - } - - let logs: Log[] = []; - for (const chunk of chunks) { - const address = chunk.map(source => source.contract); - const topics = chunk.flatMap(source => - source.events.map(event => getEventHash(event.name)) - ); - - const chunkLogs = await this.getLogs(fromBlock, toBlock, address, [ - topics - ]); - logs = logs.concat(chunkLogs); - } - - const checkpoints = logs.map(log => ({ - blockNumber: Number(log.blockNumber), - contractAddress: log.address - })); - - return { checkpoints, logs }; - } -} diff --git a/src/providers/evm/fetchers/types.ts b/src/providers/evm/fetchers/types.ts index 3f03e63..56e086f 100644 --- a/src/providers/evm/fetchers/types.ts +++ b/src/providers/evm/fetchers/types.ts @@ -9,26 +9,6 @@ export type FetchedBlock = { timestamp: number; }; -export type BlockFetcher = { - getChainId(): Promise; - getLatestBlockNumber(): Promise; - getBlock(blockNumber: number): Promise; - getBlockHash(blockNumber: number): Promise; - getLogs( - fromBlock: number, - toBlock: number, - address: string | string[], - topics?: (string | string[])[] - ): Promise; - getLogsByBlockHash(blockHash: string): Promise; - getCheckpointsRange( - fromBlock: number, - toBlock: number, - sources: ContractSourceConfig[], - getEventHash: (name: string) => string - ): Promise<{ checkpoints: 
CheckpointRecord[]; logs: Log[] }>; -}; - export type Preloader = { getCheckpointsRange( fromBlock: number, diff --git a/src/providers/evm/indexer.ts b/src/providers/evm/indexer.ts index 406a2cd..8cdfa24 100644 --- a/src/providers/evm/indexer.ts +++ b/src/providers/evm/indexer.ts @@ -1,6 +1,6 @@ import { Logger } from '../../utils/logger'; import { BaseIndexer, Instance } from '../base'; -import { createFetchers } from './fetchers'; +import { createPreloader } from './fetchers'; import { EvmProvider } from './provider'; import { Writer } from './types'; @@ -21,15 +21,12 @@ export class EvmIndexer extends BaseIndexer { log: Logger; abis?: Record; }) { - const { fetcher, preloader } = createFetchers(instance.config, log); - this.provider = new EvmProvider({ instance, log, abis, writers: this.writers, - fetcher, - preloader + preloader: createPreloader(instance.config, log) }); } diff --git a/src/providers/evm/provider.ts b/src/providers/evm/provider.ts index e83b151..c1fc35b 100644 --- a/src/providers/evm/provider.ts +++ b/src/providers/evm/provider.ts @@ -1,19 +1,43 @@ import { + createPublicClient, + formatLog, getAddress, + http, keccak256, Log, parseEventLogs, ParseEventLogsReturnType, + PublicClient, + RpcLog, stringToBytes } from 'viem'; -import { BlockFetcher, FetchedBlock, Preloader } from './fetchers/types'; +import { getRangeHint } from './helpers'; +import { FetchedBlock, Preloader } from './fetchers/types'; import { Block, CustomJsonRpcError, EventsData, Writer } from './types'; import { CheckpointRecord } from '../../stores/checkpoints'; import { ContractSourceConfig } from '../../types'; +import { sleep } from '../../utils/helpers'; import { BaseProvider, BlockNotFoundError, ReorgDetectedError } from '../base'; +type GetLogsBlockHashFilter = { + blockHash: string; +}; + +type GetLogsBlockRangeFilter = { + fromBlock: number; + toBlock: number; +}; + +/** + * Timeout for client requests in milliseconds. 
+ * This timeout is also used when fetching latest blocks in getLogs. + */ +const CLIENT_TIMEOUT = 5 * 1000; + +const MAX_BLOCKS_PER_REQUEST = 10000; + export class EvmProvider extends BaseProvider { - private readonly fetcher: BlockFetcher; + private readonly client: PublicClient; private readonly preloader?: Preloader; private readonly writers: Record; @@ -26,16 +50,19 @@ export class EvmProvider extends BaseProvider { log, abis, writers, - fetcher, preloader }: ConstructorParameters[0] & { writers: Record; - fetcher: BlockFetcher; preloader?: Preloader; }) { super({ instance, log, abis }); - this.fetcher = fetcher; + this.client = createPublicClient({ + transport: http(instance.config.network_node_url, { + timeout: CLIENT_TIMEOUT + }) + }); + this.preloader = preloader; this.writers = writers; } @@ -45,17 +72,23 @@ export class EvmProvider extends BaseProvider { } async getNetworkIdentifier(): Promise { - const chainId = await this.fetcher.getChainId(); + const chainId = await this.client.getChainId(); return `evm_${chainId}`; } async getLatestBlockNumber(): Promise { - return this.fetcher.getLatestBlockNumber(); + const blockNumber = await this.client.getBlockNumber(); + + return Number(blockNumber); } async getBlockHash(blockNumber: number) { - return this.fetcher.getBlockHash(blockNumber); + const block = await this.client.getBlock({ + blockNumber: BigInt(blockNumber) + }); + + return block.hash; } async processBlock(blockNumber: number, parentHash: string | null) { @@ -78,13 +111,9 @@ export class EvmProvider extends BaseProvider { timestamp: BigInt(cached.timestamp) } as Block; } else { - const fetched = await this.fetcher.getBlock(blockNumber); - block = { - number: BigInt(fetched.number), - hash: fetched.hash, - parentHash: fetched.parentHash, - timestamp: BigInt(fetched.timestamp) - } as Block; + block = await this.client.getBlock({ + blockNumber: BigInt(blockNumber) + }); } } } catch (err) { @@ -318,7 +347,9 @@ export class EvmProvider extends 
BaseProvider { throw new Error('Block hash is required to fetch logs from network'); } - events = await this.fetcher.getLogsByBlockHash(blockHash); + events = await this._getLogs({ + blockHash + }); } return { @@ -335,6 +366,134 @@ export class EvmProvider extends BaseProvider { }; } + /** + * This method is simpler implementation of getLogs method. + * This allows using two filters that are not supported in ethers v5: + * - `blockHash` to get logs for a specific block - if node doesn't know about that block it will fail. + * - `address` as a single address or an array of addresses. + * @param filter Logs filter + */ + private async _getLogs( + filter: (GetLogsBlockHashFilter | GetLogsBlockRangeFilter) & { + address?: string | string[]; + topics?: (string | string[])[]; + } + ): Promise { + const params: { + fromBlock?: string; + toBlock?: string; + blockHash?: string; + address?: string | string[]; + topics?: (string | string[])[]; + } = {}; + + let signal: AbortSignal | undefined; + + if ('blockHash' in filter) { + signal = AbortSignal.timeout(CLIENT_TIMEOUT); + params.blockHash = filter.blockHash; + } + + if ('fromBlock' in filter) { + params.fromBlock = `0x${filter.fromBlock.toString(16)}`; + } + + if ('toBlock' in filter) { + params.toBlock = `0x${filter.toBlock.toString(16)}`; + } + + if ('address' in filter) { + params.address = filter.address; + } + + if ('topics' in filter) { + params.topics = filter.topics; + } + + const res = await fetch(this.instance.config.network_node_url, { + method: 'POST', + signal, + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + jsonrpc: '2.0', + id: 1, + method: 'eth_getLogs', + params: [params] + }) + }); + + if (!res.ok) { + throw new Error(`Request failed: ${res.statusText}`); + } + + const json = await res.json(); + + if (json.error) { + throw new CustomJsonRpcError( + json.error.message, + json.error.code, + json.error.data + ); + } + + return json.result.map((log: RpcLog) => 
formatLog(log)); + } + + async getLogs( + fromBlock: number, + toBlock: number, + address: string | string[], + topics: (string | string[])[] = [] + ): Promise { + let result = [] as Log[]; + + let currentFrom = fromBlock; + let currentTo = Math.min(toBlock, currentFrom + MAX_BLOCKS_PER_REQUEST); + while (true) { + try { + const logs = await this._getLogs({ + fromBlock: currentFrom, + toBlock: currentTo, + address, + topics + }); + + result = result.concat(logs); + + if (currentTo === toBlock) break; + currentFrom = currentTo + 1; + currentTo = Math.min(toBlock, currentFrom + MAX_BLOCKS_PER_REQUEST); + } catch (err: unknown) { + const rangeHint = getRangeHint(err, { + from: currentFrom, + to: currentTo + }); + + if (rangeHint) { + this.log.warn( + { err, rangeHint, fromBlock: currentFrom, toBlock: currentTo }, + 'getLogs failed. Received new range hint' + ); + + currentFrom = rangeHint.from; + currentTo = rangeHint.to; + continue; + } + + this.log.error( + { fromBlock: currentFrom, toBlock: currentTo, address, err }, + 'getLogs failed' + ); + + await sleep(5000); + } + } + + return result; + } + async getLogsForSources({ fromBlock, toBlock, @@ -356,12 +515,9 @@ export class EvmProvider extends BaseProvider { source.events.map(event => this.getEventHash(event.name)) ); - const chunkEvents = await this.fetcher.getLogs( - fromBlock, - toBlock, - address, - [topics] - ); + const chunkEvents = await this.getLogs(fromBlock, toBlock, address, [ + topics + ]); events = events.concat(chunkEvents); } @@ -373,7 +529,6 @@ export class EvmProvider extends BaseProvider { toBlock: number ): Promise { const sources = this.instance.getCurrentSources(toBlock); - const getEventHash = (name: string) => this.getEventHash(name); let checkpoints: CheckpointRecord[]; let logs: Log[]; @@ -383,7 +538,7 @@ export class EvmProvider extends BaseProvider { fromBlock, toBlock, sources, - getEventHash + name => this.getEventHash(name) ); checkpoints = result.checkpoints; logs = result.logs; @@ 
-392,14 +547,17 @@ export class EvmProvider extends BaseProvider { this.blockCache.set(block.number, block); } } else { - const result = await this.fetcher.getCheckpointsRange( + const events = await this.getLogsForSources({ fromBlock, toBlock, - sources, - getEventHash - ); - checkpoints = result.checkpoints; - logs = result.logs; + sources + }); + + checkpoints = events.map(log => ({ + blockNumber: Number(log.blockNumber), + contractAddress: log.address + })); + logs = events; } for (const log of logs) { diff --git a/src/providers/evm/types.ts b/src/providers/evm/types.ts index f0cfd72..e3d2f64 100644 --- a/src/providers/evm/types.ts +++ b/src/providers/evm/types.ts @@ -7,7 +7,7 @@ import { } from 'viem'; import { BaseWriterParams } from '../../types'; -export { BlockFetcher, FetchedBlock, Preloader } from './fetchers/types'; +export { FetchedBlock, Preloader } from './fetchers/types'; export class CustomJsonRpcError extends Error { constructor( diff --git a/test/unit/providers/evm/fetchers/rpc.test.ts b/test/unit/providers/evm/fetchers/rpc.test.ts deleted file mode 100644 index 0fba556..0000000 --- a/test/unit/providers/evm/fetchers/rpc.test.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { RpcBlockFetcher } from '../../../../../src/providers/evm/fetchers/rpc'; - -describe('RpcBlockFetcher', () => { - it('should be instantiated with a network URL', () => { - const fetcher = new RpcBlockFetcher('https://rpc.example.com'); - expect(fetcher).toBeDefined(); - }); -}); From 574d6ff4a9e201e9335bbbba530f3713c4f48237 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 22:56:46 +0800 Subject: [PATCH 13/34] refactor: extract RpcPreloader, always use preloader plugin Provider always delegates preloading to a Preloader instance. RpcPreloader wraps the existing getLogs logic, HypersyncPreloader uses the HyperSync HTTP API. No conditional in getCheckpointsRange. 
--- src/providers/evm/fetchers/index.ts | 21 +--------- src/providers/evm/fetchers/rpc.ts | 60 +++++++++++++++++++++++++++++ src/providers/evm/indexer.ts | 13 ++++++- src/providers/evm/provider.ts | 32 ++++----------- 4 files changed, 80 insertions(+), 46 deletions(-) create mode 100644 src/providers/evm/fetchers/rpc.ts diff --git a/src/providers/evm/fetchers/index.ts b/src/providers/evm/fetchers/index.ts index 5f63d83..f3599ff 100644 --- a/src/providers/evm/fetchers/index.ts +++ b/src/providers/evm/fetchers/index.ts @@ -1,22 +1,5 @@ -import { CheckpointConfig } from '../../../types'; -import { Logger } from '../../../utils/logger'; import { FetchedBlock, Preloader } from './types'; +import { RpcPreloader } from './rpc'; import { HypersyncPreloader } from './hypersync'; -export { FetchedBlock, Preloader, HypersyncPreloader }; - -export function createPreloader( - config: CheckpointConfig, - log: Logger -): Preloader | undefined { - if (config.hypersync_api_token) { - log.info('using HyperSync preloader'); - - return new HypersyncPreloader({ - apiToken: config.hypersync_api_token, - rpcUrl: config.network_node_url - }); - } - - return undefined; -} +export { FetchedBlock, Preloader, RpcPreloader, HypersyncPreloader }; diff --git a/src/providers/evm/fetchers/rpc.ts b/src/providers/evm/fetchers/rpc.ts new file mode 100644 index 0000000..da0e9df --- /dev/null +++ b/src/providers/evm/fetchers/rpc.ts @@ -0,0 +1,60 @@ +import { Log } from 'viem'; +import { CheckpointRecord } from '../../../stores/checkpoints'; +import { ContractSourceConfig } from '../../../types'; +import { FetchedBlock, Preloader } from './types'; + +export class RpcPreloader implements Preloader { + private readonly getLogs: ( + fromBlock: number, + toBlock: number, + address: string | string[], + topics?: (string | string[])[] + ) => Promise; + + constructor( + getLogs: ( + fromBlock: number, + toBlock: number, + address: string | string[], + topics?: (string | string[])[] + ) => Promise + ) { + 
this.getLogs = getLogs; + } + + async getCheckpointsRange( + fromBlock: number, + toBlock: number, + sources: ContractSourceConfig[], + getEventHash: (name: string) => string + ): Promise<{ + checkpoints: CheckpointRecord[]; + logs: Log[]; + blocks: FetchedBlock[]; + }> { + const chunks: ContractSourceConfig[][] = []; + for (let i = 0; i < sources.length; i += 20) { + chunks.push(sources.slice(i, i + 20)); + } + + let logs: Log[] = []; + for (const chunk of chunks) { + const address = chunk.map(source => source.contract); + const topics = chunk.flatMap(source => + source.events.map(event => getEventHash(event.name)) + ); + + const chunkLogs = await this.getLogs(fromBlock, toBlock, address, [ + topics + ]); + logs = logs.concat(chunkLogs); + } + + const checkpoints = logs.map(log => ({ + blockNumber: Number(log.blockNumber), + contractAddress: log.address + })); + + return { checkpoints, logs, blocks: [] }; + } +} diff --git a/src/providers/evm/indexer.ts b/src/providers/evm/indexer.ts index 8cdfa24..dc99c1a 100644 --- a/src/providers/evm/indexer.ts +++ b/src/providers/evm/indexer.ts @@ -1,6 +1,6 @@ import { Logger } from '../../utils/logger'; import { BaseIndexer, Instance } from '../base'; -import { createPreloader } from './fetchers'; +import { HypersyncPreloader } from './fetchers/hypersync'; import { EvmProvider } from './provider'; import { Writer } from './types'; @@ -21,12 +21,21 @@ export class EvmIndexer extends BaseIndexer { log: Logger; abis?: Record; }) { + let preloader; + if (instance.config.hypersync_api_token) { + log.info('using HyperSync preloader'); + preloader = new HypersyncPreloader({ + apiToken: instance.config.hypersync_api_token, + rpcUrl: instance.config.network_node_url + }); + } + this.provider = new EvmProvider({ instance, log, abis, writers: this.writers, - preloader: createPreloader(instance.config, log) + preloader }); } diff --git a/src/providers/evm/provider.ts b/src/providers/evm/provider.ts index c1fc35b..69f9333 100644 --- 
a/src/providers/evm/provider.ts +++ b/src/providers/evm/provider.ts @@ -13,6 +13,7 @@ import { } from 'viem'; import { getRangeHint } from './helpers'; import { FetchedBlock, Preloader } from './fetchers/types'; +import { RpcPreloader } from './fetchers/rpc'; import { Block, CustomJsonRpcError, EventsData, Writer } from './types'; import { CheckpointRecord } from '../../stores/checkpoints'; import { ContractSourceConfig } from '../../types'; @@ -38,7 +39,7 @@ const MAX_BLOCKS_PER_REQUEST = 10000; export class EvmProvider extends BaseProvider { private readonly client: PublicClient; - private readonly preloader?: Preloader; + private readonly preloader: Preloader; private readonly writers: Record; private sourceHashes = new Map(); @@ -63,7 +64,7 @@ export class EvmProvider extends BaseProvider { }) }); - this.preloader = preloader; + this.preloader = preloader ?? new RpcPreloader(this.getLogs.bind(this)); this.writers = writers; } @@ -529,35 +530,16 @@ export class EvmProvider extends BaseProvider { toBlock: number ): Promise { const sources = this.instance.getCurrentSources(toBlock); - - let checkpoints: CheckpointRecord[]; - let logs: Log[]; - - if (this.preloader) { - const result = await this.preloader.getCheckpointsRange( + const { checkpoints, logs, blocks } = + await this.preloader.getCheckpointsRange( fromBlock, toBlock, sources, name => this.getEventHash(name) ); - checkpoints = result.checkpoints; - logs = result.logs; - - for (const block of result.blocks) { - this.blockCache.set(block.number, block); - } - } else { - const events = await this.getLogsForSources({ - fromBlock, - toBlock, - sources - }); - checkpoints = events.map(log => ({ - blockNumber: Number(log.blockNumber), - contractAddress: log.address - })); - logs = events; + for (const block of blocks) { + this.blockCache.set(block.number, block); } for (const log of logs) { From de7aafe360bffe15b67236e2e7292b1870183584 Mon Sep 17 00:00:00 2001 From: wa0x6e 
<495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 23:23:53 +0800 Subject: [PATCH 14/34] Revert "refactor: extract RpcPreloader, always use preloader plugin" This reverts commit 574d6ff4a9e201e9335bbbba530f3713c4f48237. --- src/providers/evm/fetchers/index.ts | 21 +++++++++- src/providers/evm/fetchers/rpc.ts | 60 ----------------------------- src/providers/evm/indexer.ts | 13 +------ src/providers/evm/provider.ts | 32 +++++++++++---- 4 files changed, 46 insertions(+), 80 deletions(-) delete mode 100644 src/providers/evm/fetchers/rpc.ts diff --git a/src/providers/evm/fetchers/index.ts b/src/providers/evm/fetchers/index.ts index f3599ff..5f63d83 100644 --- a/src/providers/evm/fetchers/index.ts +++ b/src/providers/evm/fetchers/index.ts @@ -1,5 +1,22 @@ +import { CheckpointConfig } from '../../../types'; +import { Logger } from '../../../utils/logger'; import { FetchedBlock, Preloader } from './types'; -import { RpcPreloader } from './rpc'; import { HypersyncPreloader } from './hypersync'; -export { FetchedBlock, Preloader, RpcPreloader, HypersyncPreloader }; +export { FetchedBlock, Preloader, HypersyncPreloader }; + +export function createPreloader( + config: CheckpointConfig, + log: Logger +): Preloader | undefined { + if (config.hypersync_api_token) { + log.info('using HyperSync preloader'); + + return new HypersyncPreloader({ + apiToken: config.hypersync_api_token, + rpcUrl: config.network_node_url + }); + } + + return undefined; +} diff --git a/src/providers/evm/fetchers/rpc.ts b/src/providers/evm/fetchers/rpc.ts deleted file mode 100644 index da0e9df..0000000 --- a/src/providers/evm/fetchers/rpc.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { Log } from 'viem'; -import { CheckpointRecord } from '../../../stores/checkpoints'; -import { ContractSourceConfig } from '../../../types'; -import { FetchedBlock, Preloader } from './types'; - -export class RpcPreloader implements Preloader { - private readonly getLogs: ( - fromBlock: number, - toBlock: number, - 
address: string | string[], - topics?: (string | string[])[] - ) => Promise; - - constructor( - getLogs: ( - fromBlock: number, - toBlock: number, - address: string | string[], - topics?: (string | string[])[] - ) => Promise - ) { - this.getLogs = getLogs; - } - - async getCheckpointsRange( - fromBlock: number, - toBlock: number, - sources: ContractSourceConfig[], - getEventHash: (name: string) => string - ): Promise<{ - checkpoints: CheckpointRecord[]; - logs: Log[]; - blocks: FetchedBlock[]; - }> { - const chunks: ContractSourceConfig[][] = []; - for (let i = 0; i < sources.length; i += 20) { - chunks.push(sources.slice(i, i + 20)); - } - - let logs: Log[] = []; - for (const chunk of chunks) { - const address = chunk.map(source => source.contract); - const topics = chunk.flatMap(source => - source.events.map(event => getEventHash(event.name)) - ); - - const chunkLogs = await this.getLogs(fromBlock, toBlock, address, [ - topics - ]); - logs = logs.concat(chunkLogs); - } - - const checkpoints = logs.map(log => ({ - blockNumber: Number(log.blockNumber), - contractAddress: log.address - })); - - return { checkpoints, logs, blocks: [] }; - } -} diff --git a/src/providers/evm/indexer.ts b/src/providers/evm/indexer.ts index dc99c1a..8cdfa24 100644 --- a/src/providers/evm/indexer.ts +++ b/src/providers/evm/indexer.ts @@ -1,6 +1,6 @@ import { Logger } from '../../utils/logger'; import { BaseIndexer, Instance } from '../base'; -import { HypersyncPreloader } from './fetchers/hypersync'; +import { createPreloader } from './fetchers'; import { EvmProvider } from './provider'; import { Writer } from './types'; @@ -21,21 +21,12 @@ export class EvmIndexer extends BaseIndexer { log: Logger; abis?: Record; }) { - let preloader; - if (instance.config.hypersync_api_token) { - log.info('using HyperSync preloader'); - preloader = new HypersyncPreloader({ - apiToken: instance.config.hypersync_api_token, - rpcUrl: instance.config.network_node_url - }); - } - this.provider = new 
EvmProvider({ instance, log, abis, writers: this.writers, - preloader + preloader: createPreloader(instance.config, log) }); } diff --git a/src/providers/evm/provider.ts b/src/providers/evm/provider.ts index 69f9333..c1fc35b 100644 --- a/src/providers/evm/provider.ts +++ b/src/providers/evm/provider.ts @@ -13,7 +13,6 @@ import { } from 'viem'; import { getRangeHint } from './helpers'; import { FetchedBlock, Preloader } from './fetchers/types'; -import { RpcPreloader } from './fetchers/rpc'; import { Block, CustomJsonRpcError, EventsData, Writer } from './types'; import { CheckpointRecord } from '../../stores/checkpoints'; import { ContractSourceConfig } from '../../types'; @@ -39,7 +38,7 @@ const MAX_BLOCKS_PER_REQUEST = 10000; export class EvmProvider extends BaseProvider { private readonly client: PublicClient; - private readonly preloader: Preloader; + private readonly preloader?: Preloader; private readonly writers: Record; private sourceHashes = new Map(); @@ -64,7 +63,7 @@ export class EvmProvider extends BaseProvider { }) }); - this.preloader = preloader ?? 
new RpcPreloader(this.getLogs.bind(this)); + this.preloader = preloader; this.writers = writers; } @@ -530,16 +529,35 @@ export class EvmProvider extends BaseProvider { toBlock: number ): Promise { const sources = this.instance.getCurrentSources(toBlock); - const { checkpoints, logs, blocks } = - await this.preloader.getCheckpointsRange( + + let checkpoints: CheckpointRecord[]; + let logs: Log[]; + + if (this.preloader) { + const result = await this.preloader.getCheckpointsRange( fromBlock, toBlock, sources, name => this.getEventHash(name) ); + checkpoints = result.checkpoints; + logs = result.logs; + + for (const block of result.blocks) { + this.blockCache.set(block.number, block); + } + } else { + const events = await this.getLogsForSources({ + fromBlock, + toBlock, + sources + }); - for (const block of blocks) { - this.blockCache.set(block.number, block); + checkpoints = events.map(log => ({ + blockNumber: Number(log.blockNumber), + contractAddress: log.address + })); + logs = events; } for (const log of logs) { From 5cbe002233bbb29dac2af2de30607de3f8c3d349 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 23:28:43 +0800 Subject: [PATCH 15/34] refactor: rename fetchers to preloaders --- src/providers/evm/indexer.ts | 2 +- .../evm/{fetchers => preloaders}/hypersync.ts | 0 .../evm/{fetchers => preloaders}/index.ts | 14 ++++++-------- .../evm/{fetchers => preloaders}/types.ts | 0 src/providers/evm/provider.ts | 2 +- src/providers/evm/types.ts | 2 +- .../evm/{fetchers => preloaders}/hypersync.test.ts | 2 +- test/unit/providers/evm/provider-cache.test.ts | 2 +- 8 files changed, 11 insertions(+), 13 deletions(-) rename src/providers/evm/{fetchers => preloaders}/hypersync.ts (100%) rename src/providers/evm/{fetchers => preloaders}/index.ts (61%) rename src/providers/evm/{fetchers => preloaders}/types.ts (100%) rename test/unit/providers/evm/{fetchers => preloaders}/hypersync.test.ts (92%) diff --git 
a/src/providers/evm/indexer.ts b/src/providers/evm/indexer.ts index 8cdfa24..ab02fa3 100644 --- a/src/providers/evm/indexer.ts +++ b/src/providers/evm/indexer.ts @@ -1,6 +1,6 @@ import { Logger } from '../../utils/logger'; import { BaseIndexer, Instance } from '../base'; -import { createPreloader } from './fetchers'; +import { createPreloader } from './preloaders'; import { EvmProvider } from './provider'; import { Writer } from './types'; diff --git a/src/providers/evm/fetchers/hypersync.ts b/src/providers/evm/preloaders/hypersync.ts similarity index 100% rename from src/providers/evm/fetchers/hypersync.ts rename to src/providers/evm/preloaders/hypersync.ts diff --git a/src/providers/evm/fetchers/index.ts b/src/providers/evm/preloaders/index.ts similarity index 61% rename from src/providers/evm/fetchers/index.ts rename to src/providers/evm/preloaders/index.ts index 5f63d83..a9fa15e 100644 --- a/src/providers/evm/fetchers/index.ts +++ b/src/providers/evm/preloaders/index.ts @@ -9,14 +9,12 @@ export function createPreloader( config: CheckpointConfig, log: Logger ): Preloader | undefined { - if (config.hypersync_api_token) { - log.info('using HyperSync preloader'); + if (!config.hypersync_api_token) return; - return new HypersyncPreloader({ - apiToken: config.hypersync_api_token, - rpcUrl: config.network_node_url - }); - } + log.info('using HyperSync preloader'); - return undefined; + return new HypersyncPreloader({ + apiToken: config.hypersync_api_token, + rpcUrl: config.network_node_url + }); } diff --git a/src/providers/evm/fetchers/types.ts b/src/providers/evm/preloaders/types.ts similarity index 100% rename from src/providers/evm/fetchers/types.ts rename to src/providers/evm/preloaders/types.ts diff --git a/src/providers/evm/provider.ts b/src/providers/evm/provider.ts index c1fc35b..9582d9f 100644 --- a/src/providers/evm/provider.ts +++ b/src/providers/evm/provider.ts @@ -12,7 +12,7 @@ import { stringToBytes } from 'viem'; import { getRangeHint } from 
'./helpers'; -import { FetchedBlock, Preloader } from './fetchers/types'; +import { FetchedBlock, Preloader } from './preloaders/types'; import { Block, CustomJsonRpcError, EventsData, Writer } from './types'; import { CheckpointRecord } from '../../stores/checkpoints'; import { ContractSourceConfig } from '../../types'; diff --git a/src/providers/evm/types.ts b/src/providers/evm/types.ts index e3d2f64..429f67c 100644 --- a/src/providers/evm/types.ts +++ b/src/providers/evm/types.ts @@ -7,7 +7,7 @@ import { } from 'viem'; import { BaseWriterParams } from '../../types'; -export { FetchedBlock, Preloader } from './fetchers/types'; +export { FetchedBlock, Preloader } from './preloaders/types'; export class CustomJsonRpcError extends Error { constructor( diff --git a/test/unit/providers/evm/fetchers/hypersync.test.ts b/test/unit/providers/evm/preloaders/hypersync.test.ts similarity index 92% rename from test/unit/providers/evm/fetchers/hypersync.test.ts rename to test/unit/providers/evm/preloaders/hypersync.test.ts index ac56575..7af9d19 100644 --- a/test/unit/providers/evm/fetchers/hypersync.test.ts +++ b/test/unit/providers/evm/preloaders/hypersync.test.ts @@ -1,4 +1,4 @@ -import { HypersyncPreloader } from '../../../../../src/providers/evm/fetchers/hypersync'; +import { HypersyncPreloader } from '../../../../../src/providers/evm/preloaders/hypersync'; describe('HypersyncPreloader', () => { it('should be instantiated with apiToken and rpcUrl', () => { diff --git a/test/unit/providers/evm/provider-cache.test.ts b/test/unit/providers/evm/provider-cache.test.ts index 53e558c..88bef80 100644 --- a/test/unit/providers/evm/provider-cache.test.ts +++ b/test/unit/providers/evm/provider-cache.test.ts @@ -1,5 +1,5 @@ import { EvmProvider } from '../../../../src/providers/evm/provider'; -import { FetchedBlock } from '../../../../src/providers/evm/fetchers/types'; +import { FetchedBlock } from '../../../../src/providers/evm/preloaders/types'; describe('EvmProvider block cache', 
() => { it('should store and retrieve cached blocks', () => { From 345d1be9ed9ead34ef6fe073f350eada2d4ac797 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Thu, 9 Apr 2026 23:45:47 +0800 Subject: [PATCH 16/34] refactor: simplify hypersync preloader and cache management - Use chunk() utility instead of manual chunking loops - Derive checkpoints centrally in provider instead of preloader - Replace spread/concat with safe loops to avoid stack overflow - Remove unnecessary type re-exports - Self-manage blockCache within getCheckpointsRange --- src/providers/evm/indexer.ts | 4 +- src/providers/evm/preloaders/hypersync.ts | 69 +++++-------------- src/providers/evm/preloaders/index.ts | 7 +- src/providers/evm/preloaders/types.ts | 2 - src/providers/evm/provider.ts | 53 +++++++------- src/providers/evm/types.ts | 2 - .../evm/preloaders/hypersync.test.ts | 11 --- .../unit/providers/evm/provider-cache.test.ts | 29 -------- 8 files changed, 50 insertions(+), 127 deletions(-) delete mode 100644 test/unit/providers/evm/preloaders/hypersync.test.ts delete mode 100644 test/unit/providers/evm/provider-cache.test.ts diff --git a/src/providers/evm/indexer.ts b/src/providers/evm/indexer.ts index ab02fa3..f848839 100644 --- a/src/providers/evm/indexer.ts +++ b/src/providers/evm/indexer.ts @@ -1,6 +1,5 @@ import { Logger } from '../../utils/logger'; import { BaseIndexer, Instance } from '../base'; -import { createPreloader } from './preloaders'; import { EvmProvider } from './provider'; import { Writer } from './types'; @@ -25,8 +24,7 @@ export class EvmIndexer extends BaseIndexer { instance, log, abis, - writers: this.writers, - preloader: createPreloader(instance.config, log) + writers: this.writers }); } diff --git a/src/providers/evm/preloaders/hypersync.ts b/src/providers/evm/preloaders/hypersync.ts index cf8e8b7..a1545f3 100644 --- a/src/providers/evm/preloaders/hypersync.ts +++ b/src/providers/evm/preloaders/hypersync.ts @@ -1,7 +1,7 @@ 
import { Log } from 'viem'; -import { CheckpointRecord } from '../../../stores/checkpoints'; -import { ContractSourceConfig } from '../../../types'; import { FetchedBlock, Preloader } from './types'; +import { ContractSourceConfig } from '../../../types'; +import { chunk } from '../../../utils/helpers'; type HypersyncLog = { block_number?: number; @@ -53,33 +53,12 @@ const FIELD_SELECTION = { }; export class HypersyncPreloader implements Preloader { - private url: string | null = null; + private readonly url: string; private readonly apiToken: string; - private readonly rpcUrl: string; - constructor({ apiToken, rpcUrl }: { apiToken: string; rpcUrl: string }) { + constructor({ apiToken, chainId }: { apiToken: string; chainId: number }) { this.apiToken = apiToken; - this.rpcUrl = rpcUrl; - } - - private async getUrl(): Promise { - if (!this.url) { - const res = await fetch(this.rpcUrl, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - jsonrpc: '2.0', - id: 1, - method: 'eth_chainId', - params: [] - }) - }); - const json = await res.json(); - const chainId = parseInt(json.result, 16); - this.url = `https://${chainId}.hypersync.xyz`; - } - - return this.url; + this.url = `https://${chainId}.hypersync.xyz`; } async getCheckpointsRange( @@ -88,21 +67,15 @@ export class HypersyncPreloader implements Preloader { sources: ContractSourceConfig[], getEventHash: (name: string) => string ): Promise<{ - checkpoints: CheckpointRecord[]; logs: Log[]; blocks: FetchedBlock[]; }> { - const chunks: ContractSourceConfig[][] = []; - for (let i = 0; i < sources.length; i += 20) { - chunks.push(sources.slice(i, i + 20)); - } - - let allLogs: Log[] = []; + const allLogs: Log[] = []; const allBlocks: FetchedBlock[] = []; - for (const chunk of chunks) { - const addresses = chunk.map(source => source.contract); - const topics = chunk.flatMap(source => + for (const sourceChunk of chunk(sources, 20)) { + const addresses = sourceChunk.map(source => 
source.contract); + const topics = sourceChunk.flatMap(source => source.events.map(event => getEventHash(event.name)) ); @@ -112,16 +85,11 @@ export class HypersyncPreloader implements Preloader { addresses, [topics] ); - allLogs = allLogs.concat(logs); - allBlocks.push(...blocks); + for (const log of logs) allLogs.push(log); + for (const block of blocks) allBlocks.push(block); } - const checkpoints = allLogs.map(log => ({ - blockNumber: Number(log.blockNumber), - contractAddress: log.address - })); - - return { checkpoints, logs: allLogs, blocks: allBlocks }; + return { logs: allLogs, blocks: allBlocks }; } private async fetchLogs( @@ -137,7 +105,7 @@ export class HypersyncPreloader implements Preloader { : [topics[0]] : undefined; - let allLogs: HypersyncLog[] = []; + const allLogs: HypersyncLog[] = []; const allBlocks: FetchedBlock[] = []; let currentFrom = fromBlock; const exclusiveToBlock = toBlock + 1; @@ -155,8 +123,9 @@ export class HypersyncPreloader implements Preloader { field_selection: FIELD_SELECTION }); - allBlocks.push(...this.convertBlocks(response.data.blocks)); - allLogs = allLogs.concat(response.data.logs); + for (const block of this.convertBlocks(response.data.blocks)) + allBlocks.push(block); + for (const log of response.data.logs) allLogs.push(log); if (response.next_block >= exclusiveToBlock) break; currentFrom = response.next_block; @@ -168,7 +137,7 @@ export class HypersyncPreloader implements Preloader { private async query( body: Record ): Promise { - const url = await this.getUrl(); + const url = this.url; const res = await fetch(`${url}/query`, { method: 'POST', headers: { @@ -221,9 +190,7 @@ export class HypersyncPreloader implements Preloader { blockNumber: log.block_number != null ? BigInt(log.block_number) : null, data: (log.data ?? '0x') as `0x${string}`, logIndex: log.log_index ?? 0, - transactionHash: (log.transaction_hash ?? null) as - | `0x${string}` - | null, + transactionHash: (log.transaction_hash ?? 
null) as `0x${string}` | null, transactionIndex: log.transaction_index ?? 0, removed: log.removed ?? false, topics diff --git a/src/providers/evm/preloaders/index.ts b/src/providers/evm/preloaders/index.ts index a9fa15e..02e9ac1 100644 --- a/src/providers/evm/preloaders/index.ts +++ b/src/providers/evm/preloaders/index.ts @@ -1,12 +1,13 @@ +import { HypersyncPreloader } from './hypersync'; +import { FetchedBlock, Preloader } from './types'; import { CheckpointConfig } from '../../../types'; import { Logger } from '../../../utils/logger'; -import { FetchedBlock, Preloader } from './types'; -import { HypersyncPreloader } from './hypersync'; export { FetchedBlock, Preloader, HypersyncPreloader }; export function createPreloader( config: CheckpointConfig, + chainId: number, log: Logger ): Preloader | undefined { if (!config.hypersync_api_token) return; @@ -15,6 +16,6 @@ export function createPreloader( return new HypersyncPreloader({ apiToken: config.hypersync_api_token, - rpcUrl: config.network_node_url + chainId }); } diff --git a/src/providers/evm/preloaders/types.ts b/src/providers/evm/preloaders/types.ts index 56e086f..73f968b 100644 --- a/src/providers/evm/preloaders/types.ts +++ b/src/providers/evm/preloaders/types.ts @@ -1,5 +1,4 @@ import { Log } from 'viem'; -import { CheckpointRecord } from '../../../stores/checkpoints'; import { ContractSourceConfig } from '../../../types'; export type FetchedBlock = { @@ -16,7 +15,6 @@ export type Preloader = { sources: ContractSourceConfig[], getEventHash: (name: string) => string ): Promise<{ - checkpoints: CheckpointRecord[]; logs: Log[]; blocks: FetchedBlock[]; }>; diff --git a/src/providers/evm/provider.ts b/src/providers/evm/provider.ts index 9582d9f..d5a66a9 100644 --- a/src/providers/evm/provider.ts +++ b/src/providers/evm/provider.ts @@ -12,11 +12,12 @@ import { stringToBytes } from 'viem'; import { getRangeHint } from './helpers'; +import { createPreloader } from './preloaders'; import { FetchedBlock, Preloader } 
from './preloaders/types'; import { Block, CustomJsonRpcError, EventsData, Writer } from './types'; import { CheckpointRecord } from '../../stores/checkpoints'; import { ContractSourceConfig } from '../../types'; -import { sleep } from '../../utils/helpers'; +import { chunk, sleep } from '../../utils/helpers'; import { BaseProvider, BlockNotFoundError, ReorgDetectedError } from '../base'; type GetLogsBlockHashFilter = { @@ -38,7 +39,8 @@ const MAX_BLOCKS_PER_REQUEST = 10000; export class EvmProvider extends BaseProvider { private readonly client: PublicClient; - private readonly preloader?: Preloader; + private preloader?: Preloader; + private preloaderInitialized = false; private readonly writers: Record; private sourceHashes = new Map(); @@ -49,11 +51,9 @@ export class EvmProvider extends BaseProvider { instance, log, abis, - writers, - preloader + writers }: ConstructorParameters[0] & { writers: Record; - preloader?: Preloader; }) { super({ instance, log, abis }); @@ -63,7 +63,6 @@ export class EvmProvider extends BaseProvider { }) }); - this.preloader = preloader; this.writers = writers; } @@ -503,15 +502,10 @@ export class EvmProvider extends BaseProvider { toBlock: number; sources: ContractSourceConfig[]; }): Promise { - const chunks: ContractSourceConfig[][] = []; - for (let i = 0; i < sources.length; i += 20) { - chunks.push(sources.slice(i, i + 20)); - } - let events: Log[] = []; - for (const chunk of chunks) { - const address = chunk.map(source => source.contract); - const topics = chunk.flatMap(source => + for (const sourceChunk of chunk(sources, 20)) { + const address = sourceChunk.map(source => source.contract); + const topics = sourceChunk.flatMap(source => source.events.map(event => this.getEventHash(event.name)) ); @@ -524,40 +518,44 @@ export class EvmProvider extends BaseProvider { return events; } + private async getPreloader(): Promise { + if (!this.preloaderInitialized) { + this.preloaderInitialized = true; + const chainId = await 
this.client.getChainId(); + this.preloader = createPreloader(this.instance.config, chainId, this.log); + } + return this.preloader; + } + async getCheckpointsRange( fromBlock: number, toBlock: number ): Promise { const sources = this.instance.getCurrentSources(toBlock); + this.blockCache.clear(); + + const preloader = await this.getPreloader(); - let checkpoints: CheckpointRecord[]; let logs: Log[]; - if (this.preloader) { - const result = await this.preloader.getCheckpointsRange( + if (preloader) { + const result = await preloader.getCheckpointsRange( fromBlock, toBlock, sources, name => this.getEventHash(name) ); - checkpoints = result.checkpoints; logs = result.logs; for (const block of result.blocks) { this.blockCache.set(block.number, block); } } else { - const events = await this.getLogsForSources({ + logs = await this.getLogsForSources({ fromBlock, toBlock, sources }); - - checkpoints = events.map(log => ({ - blockNumber: Number(log.blockNumber), - contractAddress: log.address - })); - logs = events; } for (const log of logs) { @@ -570,7 +568,10 @@ export class EvmProvider extends BaseProvider { this.logsCache.get(log.blockNumber)?.push(log); } - return checkpoints; + return logs.map(log => ({ + blockNumber: Number(log.blockNumber), + contractAddress: log.address + })); } getEventHash(eventName: string) { diff --git a/src/providers/evm/types.ts b/src/providers/evm/types.ts index 429f67c..9fb7ac7 100644 --- a/src/providers/evm/types.ts +++ b/src/providers/evm/types.ts @@ -7,8 +7,6 @@ import { } from 'viem'; import { BaseWriterParams } from '../../types'; -export { FetchedBlock, Preloader } from './preloaders/types'; - export class CustomJsonRpcError extends Error { constructor( message: string, diff --git a/test/unit/providers/evm/preloaders/hypersync.test.ts b/test/unit/providers/evm/preloaders/hypersync.test.ts deleted file mode 100644 index 7af9d19..0000000 --- a/test/unit/providers/evm/preloaders/hypersync.test.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { 
HypersyncPreloader } from '../../../../../src/providers/evm/preloaders/hypersync'; - -describe('HypersyncPreloader', () => { - it('should be instantiated with apiToken and rpcUrl', () => { - const preloader = new HypersyncPreloader({ - apiToken: 'test-token', - rpcUrl: 'https://rpc.example.com' - }); - expect(preloader).toBeDefined(); - }); -}); diff --git a/test/unit/providers/evm/provider-cache.test.ts b/test/unit/providers/evm/provider-cache.test.ts deleted file mode 100644 index 88bef80..0000000 --- a/test/unit/providers/evm/provider-cache.test.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { EvmProvider } from '../../../../src/providers/evm/provider'; -import { FetchedBlock } from '../../../../src/providers/evm/preloaders/types'; - -describe('EvmProvider block cache', () => { - it('should store and retrieve cached blocks', () => { - const provider = Object.create(EvmProvider.prototype); - provider.blockCache = new Map(); - - const block100: FetchedBlock = { - number: 100, - hash: '0xabc', - parentHash: '0xdef', - timestamp: 1700000000 - }; - const block101: FetchedBlock = { - number: 101, - hash: '0x123', - parentHash: '0xabc', - timestamp: 1700000012 - }; - - provider.blockCache.set(100, block100); - provider.blockCache.set(101, block101); - - expect(provider.blockCache.get(100)).toEqual(block100); - expect(provider.blockCache.get(101)).toEqual(block101); - expect(provider.blockCache.has(102)).toBe(false); - }); -}); From 2f7413937c9d146ff1f8ec434e420a636d70d4c1 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Fri, 10 Apr 2026 00:56:49 +0800 Subject: [PATCH 17/34] refactor: simplify hypersync preloader by inlining helpers --- src/providers/evm/preloaders/hypersync.ts | 193 ++++++++-------------- 1 file changed, 66 insertions(+), 127 deletions(-) diff --git a/src/providers/evm/preloaders/hypersync.ts b/src/providers/evm/preloaders/hypersync.ts index a1545f3..06d116f 100644 --- a/src/providers/evm/preloaders/hypersync.ts +++ 
b/src/providers/evm/preloaders/hypersync.ts @@ -3,34 +3,29 @@ import { FetchedBlock, Preloader } from './types'; import { ContractSourceConfig } from '../../../types'; import { chunk } from '../../../utils/helpers'; -type HypersyncLog = { - block_number?: number; - log_index?: number; - transaction_index?: number; - transaction_hash?: string; - block_hash?: string; - address?: string; - data?: string; - topic0?: string | null; - topic1?: string | null; - topic2?: string | null; - topic3?: string | null; - removed?: boolean; -}; - -type HypersyncBlock = { - number?: number; - timestamp?: number; - hash?: string; - parent_hash?: string; -}; - type HypersyncResponse = { next_block: number; - archive_height?: number; data: { - blocks: HypersyncBlock[]; - logs: HypersyncLog[]; + blocks: { + number?: number; + timestamp?: number; + hash?: string; + parent_hash?: string; + }[]; + logs: { + block_number?: number; + log_index?: number; + transaction_index?: number; + transaction_hash?: string; + block_hash?: string; + address?: string; + data?: string; + topic0?: string | null; + topic1?: string | null; + topic2?: string | null; + topic3?: string | null; + removed?: boolean; + }[]; }; }; @@ -66,79 +61,67 @@ export class HypersyncPreloader implements Preloader { toBlock: number, sources: ContractSourceConfig[], getEventHash: (name: string) => string - ): Promise<{ - logs: Log[]; - blocks: FetchedBlock[]; - }> { + ): Promise<{ logs: Log[]; blocks: FetchedBlock[] }> { const allLogs: Log[] = []; const allBlocks: FetchedBlock[] = []; for (const sourceChunk of chunk(sources, 20)) { const addresses = sourceChunk.map(source => source.contract); - const topics = sourceChunk.flatMap(source => + const topic0 = sourceChunk.flatMap(source => source.events.map(event => getEventHash(event.name)) ); - const { logs, blocks } = await this.fetchLogs( - fromBlock, - toBlock, - addresses, - [topics] - ); - for (const log of logs) allLogs.push(log); - for (const block of blocks) 
allBlocks.push(block); - } + let currentFrom = fromBlock; + const exclusiveTo = toBlock + 1; - return { logs: allLogs, blocks: allBlocks }; - } + while (currentFrom < exclusiveTo) { + const response = await this.query({ + from_block: currentFrom, + to_block: exclusiveTo, + logs: [{ address: addresses, topics: [topic0] }], + field_selection: FIELD_SELECTION + }); - private async fetchLogs( - fromBlock: number, - toBlock: number, - addresses: string[], - topics: (string | string[])[] - ): Promise<{ logs: Log[]; blocks: FetchedBlock[] }> { - const topic0 = - topics.length > 0 - ? Array.isArray(topics[0]) - ? topics[0] - : [topics[0]] - : undefined; - - const allLogs: HypersyncLog[] = []; - const allBlocks: FetchedBlock[] = []; - let currentFrom = fromBlock; - const exclusiveToBlock = toBlock + 1; - - while (currentFrom < exclusiveToBlock) { - const response = await this.query({ - from_block: currentFrom, - to_block: exclusiveToBlock, - logs: [ - { - address: addresses, - topics: topic0 ? [topic0] : undefined + // NOTE: do not replace for/push with spread — spread causes stack overflow on large arrays + for (const block of response.data.blocks) { + if (block.number != null && block.timestamp != null && block.hash && block.parent_hash) { + allBlocks.push({ + number: block.number, + hash: block.hash, + parentHash: block.parent_hash, + timestamp: block.timestamp + }); } - ], - field_selection: FIELD_SELECTION - }); - - for (const block of this.convertBlocks(response.data.blocks)) - allBlocks.push(block); - for (const log of response.data.logs) allLogs.push(log); - - if (response.next_block >= exclusiveToBlock) break; - currentFrom = response.next_block; + } + + for (const log of response.data.logs) { + const topics = [log.topic0, log.topic1, log.topic2, log.topic3].filter( + (t): t is string => !!t + ) as `0x${string}`[]; + + allLogs.push({ + address: (log.address ?? '0x') as `0x${string}`, + blockHash: (log.block_hash ?? 
null) as `0x${string}` | null, + blockNumber: log.block_number != null ? BigInt(log.block_number) : null, + data: (log.data ?? '0x') as `0x${string}`, + logIndex: log.log_index ?? 0, + transactionHash: (log.transaction_hash ?? null) as `0x${string}` | null, + transactionIndex: log.transaction_index ?? 0, + removed: log.removed ?? false, + topics + } as Log); + } + + if (response.next_block >= exclusiveTo) break; + currentFrom = response.next_block; + } } - return { logs: this.convertLogs(allLogs), blocks: allBlocks }; + return { logs: allLogs, blocks: allBlocks }; } - private async query( - body: Record - ): Promise { - const url = this.url; - const res = await fetch(`${url}/query`, { + private async query(body: Record): Promise { + const res = await fetch(`${this.url}/query`, { method: 'POST', headers: { 'Content-Type': 'application/json', @@ -153,48 +136,4 @@ export class HypersyncPreloader implements Preloader { return res.json(); } - - private convertBlocks(blocks: HypersyncBlock[]): FetchedBlock[] { - const result: FetchedBlock[] = []; - - for (const block of blocks) { - if ( - block.number != null && - block.timestamp != null && - block.hash != null && - block.parent_hash != null - ) { - result.push({ - number: block.number, - hash: block.hash, - parentHash: block.parent_hash, - timestamp: block.timestamp - }); - } - } - - return result; - } - - private convertLogs(hypersyncLogs: HypersyncLog[]): Log[] { - return hypersyncLogs.map(log => { - const topics: `0x${string}`[] = []; - if (log.topic0) topics.push(log.topic0 as `0x${string}`); - if (log.topic1) topics.push(log.topic1 as `0x${string}`); - if (log.topic2) topics.push(log.topic2 as `0x${string}`); - if (log.topic3) topics.push(log.topic3 as `0x${string}`); - - return { - address: (log.address ?? '0x') as `0x${string}`, - blockHash: (log.block_hash ?? null) as `0x${string}` | null, - blockNumber: log.block_number != null ? BigInt(log.block_number) : null, - data: (log.data ?? 
'0x') as `0x${string}`, - logIndex: log.log_index ?? 0, - transactionHash: (log.transaction_hash ?? null) as `0x${string}` | null, - transactionIndex: log.transaction_index ?? 0, - removed: log.removed ?? false, - topics - } as Log; - }); - } } From eb0ffa048f17525617d8a2f8c882d5062a64d486 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Fri, 10 Apr 2026 09:26:47 +0800 Subject: [PATCH 18/34] style: apply eslint formatting --- src/providers/evm/preloaders/hypersync.ts | 27 +++++++++++++++++------ 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/src/providers/evm/preloaders/hypersync.ts b/src/providers/evm/preloaders/hypersync.ts index 06d116f..2fe1889 100644 --- a/src/providers/evm/preloaders/hypersync.ts +++ b/src/providers/evm/preloaders/hypersync.ts @@ -84,7 +84,12 @@ export class HypersyncPreloader implements Preloader { // NOTE: do not replace for/push with spread — spread causes stack overflow on large arrays for (const block of response.data.blocks) { - if (block.number != null && block.timestamp != null && block.hash && block.parent_hash) { + if ( + block.number != null && + block.timestamp != null && + block.hash && + block.parent_hash + ) { allBlocks.push({ number: block.number, hash: block.hash, @@ -95,17 +100,23 @@ export class HypersyncPreloader implements Preloader { } for (const log of response.data.logs) { - const topics = [log.topic0, log.topic1, log.topic2, log.topic3].filter( - (t): t is string => !!t - ) as `0x${string}`[]; + const topics = [ + log.topic0, + log.topic1, + log.topic2, + log.topic3 + ].filter((t): t is string => !!t) as `0x${string}`[]; allLogs.push({ address: (log.address ?? '0x') as `0x${string}`, blockHash: (log.block_hash ?? null) as `0x${string}` | null, - blockNumber: log.block_number != null ? BigInt(log.block_number) : null, + blockNumber: + log.block_number != null ? BigInt(log.block_number) : null, data: (log.data ?? '0x') as `0x${string}`, logIndex: log.log_index ?? 
0, - transactionHash: (log.transaction_hash ?? null) as `0x${string}` | null, + transactionHash: (log.transaction_hash ?? null) as + | `0x${string}` + | null, transactionIndex: log.transaction_index ?? 0, removed: log.removed ?? false, topics @@ -120,7 +131,9 @@ export class HypersyncPreloader implements Preloader { return { logs: allLogs, blocks: allBlocks }; } - private async query(body: Record): Promise { + private async query( + body: Record + ): Promise { const res = await fetch(`${this.url}/query`, { method: 'POST', headers: { From d695ffa83eb06bab9a407bf239b75b27efea451a Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Fri, 10 Apr 2026 09:31:02 +0800 Subject: [PATCH 19/34] refactor: revert unrelated chunk refactor in provider --- src/providers/evm/provider.ts | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/src/providers/evm/provider.ts b/src/providers/evm/provider.ts index d5a66a9..a8b5d17 100644 --- a/src/providers/evm/provider.ts +++ b/src/providers/evm/provider.ts @@ -17,7 +17,7 @@ import { FetchedBlock, Preloader } from './preloaders/types'; import { Block, CustomJsonRpcError, EventsData, Writer } from './types'; import { CheckpointRecord } from '../../stores/checkpoints'; import { ContractSourceConfig } from '../../types'; -import { chunk, sleep } from '../../utils/helpers'; +import { sleep } from '../../utils/helpers'; import { BaseProvider, BlockNotFoundError, ReorgDetectedError } from '../base'; type GetLogsBlockHashFilter = { @@ -502,10 +502,15 @@ export class EvmProvider extends BaseProvider { toBlock: number; sources: ContractSourceConfig[]; }): Promise { + const chunks: ContractSourceConfig[][] = []; + for (let i = 0; i < sources.length; i += 20) { + chunks.push(sources.slice(i, i + 20)); + } + let events: Log[] = []; - for (const sourceChunk of chunk(sources, 20)) { - const address = sourceChunk.map(source => source.contract); - const topics = 
sourceChunk.flatMap(source => + for (const chunk of chunks) { + const address = chunk.map(source => source.contract); + const topics = chunk.flatMap(source => source.events.map(event => this.getEventHash(event.name)) ); @@ -536,7 +541,7 @@ export class EvmProvider extends BaseProvider { const preloader = await this.getPreloader(); - let logs: Log[]; + let events: Log[]; if (preloader) { const result = await preloader.getCheckpointsRange( @@ -545,20 +550,20 @@ export class EvmProvider extends BaseProvider { sources, name => this.getEventHash(name) ); - logs = result.logs; + events = result.logs; for (const block of result.blocks) { this.blockCache.set(block.number, block); } } else { - logs = await this.getLogsForSources({ + events = await this.getLogsForSources({ fromBlock, toBlock, sources }); } - for (const log of logs) { + for (const log of events) { if (log.blockNumber === null) continue; if (!this.logsCache.has(log.blockNumber)) { @@ -568,7 +573,7 @@ export class EvmProvider extends BaseProvider { this.logsCache.get(log.blockNumber)?.push(log); } - return logs.map(log => ({ + return events.map(log => ({ blockNumber: Number(log.blockNumber), contractAddress: log.address })); From 5a56fe969eef3bef65959995fe72745f703e3d36 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 17:36:20 +0800 Subject: [PATCH 20/34] refactor: extract HyperSync into dedicated HyperSyncEvmProvider MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Create HyperSyncEvmProvider extending EvmProvider so HyperSync usage is an explicit provider choice rather than a config-driven internal toggle. Remove preloader abstraction layer and hypersync_api_token from shared config schema — API token is now a constructor argument on HyperSyncEvmIndexer. 
--- src/providers/evm/hypersync-indexer.ts | 39 ++++++++ .../hypersync.ts => hypersync-provider.ts} | 95 ++++++++++++++++--- src/providers/evm/index.ts | 2 + src/providers/evm/preloaders/index.ts | 21 ---- src/providers/evm/preloaders/types.ts | 21 ---- src/providers/evm/provider.ts | 74 ++++----------- src/schemas.ts | 3 +- 7 files changed, 143 insertions(+), 112 deletions(-) create mode 100644 src/providers/evm/hypersync-indexer.ts rename src/providers/evm/{preloaders/hypersync.ts => hypersync-provider.ts} (61%) delete mode 100644 src/providers/evm/preloaders/index.ts delete mode 100644 src/providers/evm/preloaders/types.ts diff --git a/src/providers/evm/hypersync-indexer.ts b/src/providers/evm/hypersync-indexer.ts new file mode 100644 index 0000000..03a3703 --- /dev/null +++ b/src/providers/evm/hypersync-indexer.ts @@ -0,0 +1,39 @@ +import { Logger } from '../../utils/logger'; +import { BaseIndexer, Instance } from '../base'; +import { HyperSyncEvmProvider } from './hypersync-provider'; +import { Writer } from './types'; + +export class HyperSyncEvmIndexer extends BaseIndexer { + private writers: Record; + private apiToken: string; + + constructor(writers: Record, apiToken: string) { + super(); + this.writers = writers; + this.apiToken = apiToken; + } + + init({ + instance, + log, + abis + }: { + instance: Instance; + log: Logger; + abis?: Record; + }) { + log.info('using HyperSync provider'); + + this.provider = new HyperSyncEvmProvider({ + instance, + log, + abis, + writers: this.writers, + apiToken: this.apiToken + }); + } + + public getHandlers(): string[] { + return Object.keys(this.writers); + } +} diff --git a/src/providers/evm/preloaders/hypersync.ts b/src/providers/evm/hypersync-provider.ts similarity index 61% rename from src/providers/evm/preloaders/hypersync.ts rename to src/providers/evm/hypersync-provider.ts index 2fe1889..187cef8 100644 --- a/src/providers/evm/preloaders/hypersync.ts +++ b/src/providers/evm/hypersync-provider.ts @@ -1,7 +1,16 @@ 
import { Log } from 'viem'; -import { FetchedBlock, Preloader } from './types'; -import { ContractSourceConfig } from '../../../types'; -import { chunk } from '../../../utils/helpers'; +import { EvmProvider } from './provider'; +import { Block } from './types'; +import { CheckpointRecord } from '../../stores/checkpoints'; +import { ContractSourceConfig } from '../../types'; +import { chunk } from '../../utils/helpers'; + +type FetchedBlock = { + number: number; + hash: string; + parentHash: string; + timestamp: number; +}; type HypersyncResponse = { next_block: number; @@ -47,20 +56,72 @@ const FIELD_SELECTION = { ] }; -export class HypersyncPreloader implements Preloader { - private readonly url: string; +export class HyperSyncEvmProvider extends EvmProvider { private readonly apiToken: string; + private hypersyncUrl?: string; + private blockCache = new Map(); + + constructor( + params: ConstructorParameters[0] & { + apiToken: string; + } + ) { + super(params); + this.apiToken = params.apiToken; + } + + protected async fetchBlock(blockNumber: number): Promise { + const cached = this.blockCache.get(blockNumber); + if (cached) { + this.blockCache.delete(blockNumber); + return { + number: BigInt(cached.number), + hash: cached.hash, + parentHash: cached.parentHash, + timestamp: BigInt(cached.timestamp) + } as Block; + } - constructor({ apiToken, chainId }: { apiToken: string; chainId: number }) { - this.apiToken = apiToken; - this.url = `https://${chainId}.hypersync.xyz`; + return super.fetchBlock(blockNumber); } async getCheckpointsRange( + fromBlock: number, + toBlock: number + ): Promise { + const sources = this.instance.getCurrentSources(toBlock); + this.blockCache.clear(); + + const { logs, blocks } = await this.queryCheckpointsRange( + fromBlock, + toBlock, + sources + ); + + for (const block of blocks) { + this.blockCache.set(block.number, block); + } + + for (const log of logs) { + if (log.blockNumber === null) continue; + + if 
(!this.logsCache.has(log.blockNumber)) { + this.logsCache.set(log.blockNumber, []); + } + + this.logsCache.get(log.blockNumber)?.push(log); + } + + return logs.map(log => ({ + blockNumber: Number(log.blockNumber), + contractAddress: log.address + })); + } + + private async queryCheckpointsRange( fromBlock: number, toBlock: number, - sources: ContractSourceConfig[], - getEventHash: (name: string) => string + sources: ContractSourceConfig[] ): Promise<{ logs: Log[]; blocks: FetchedBlock[] }> { const allLogs: Log[] = []; const allBlocks: FetchedBlock[] = []; @@ -68,7 +129,7 @@ export class HypersyncPreloader implements Preloader { for (const sourceChunk of chunk(sources, 20)) { const addresses = sourceChunk.map(source => source.contract); const topic0 = sourceChunk.flatMap(source => - source.events.map(event => getEventHash(event.name)) + source.events.map(event => this.getEventHash(event.name)) ); let currentFrom = fromBlock; @@ -131,10 +192,20 @@ export class HypersyncPreloader implements Preloader { return { logs: allLogs, blocks: allBlocks }; } + private async getHypersyncUrl(): Promise { + if (!this.hypersyncUrl) { + const chainId = await this.getChainId(); + this.hypersyncUrl = `https://${chainId}.hypersync.xyz`; + } + return this.hypersyncUrl; + } + private async query( body: Record ): Promise { - const res = await fetch(`${this.url}/query`, { + const url = await this.getHypersyncUrl(); + + const res = await fetch(`${url}/query`, { method: 'POST', headers: { 'Content-Type': 'application/json', diff --git a/src/providers/evm/index.ts b/src/providers/evm/index.ts index e5c759a..bae71b1 100644 --- a/src/providers/evm/index.ts +++ b/src/providers/evm/index.ts @@ -1,3 +1,5 @@ export { EvmProvider } from './provider'; +export { HyperSyncEvmProvider } from './hypersync-provider'; export { EvmIndexer } from './indexer'; +export { HyperSyncEvmIndexer } from './hypersync-indexer'; export * from './types'; diff --git a/src/providers/evm/preloaders/index.ts 
b/src/providers/evm/preloaders/index.ts deleted file mode 100644 index 02e9ac1..0000000 --- a/src/providers/evm/preloaders/index.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { HypersyncPreloader } from './hypersync'; -import { FetchedBlock, Preloader } from './types'; -import { CheckpointConfig } from '../../../types'; -import { Logger } from '../../../utils/logger'; - -export { FetchedBlock, Preloader, HypersyncPreloader }; - -export function createPreloader( - config: CheckpointConfig, - chainId: number, - log: Logger -): Preloader | undefined { - if (!config.hypersync_api_token) return; - - log.info('using HyperSync preloader'); - - return new HypersyncPreloader({ - apiToken: config.hypersync_api_token, - chainId - }); -} diff --git a/src/providers/evm/preloaders/types.ts b/src/providers/evm/preloaders/types.ts deleted file mode 100644 index 73f968b..0000000 --- a/src/providers/evm/preloaders/types.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Log } from 'viem'; -import { ContractSourceConfig } from '../../../types'; - -export type FetchedBlock = { - number: number; - hash: string; - parentHash: string; - timestamp: number; -}; - -export type Preloader = { - getCheckpointsRange( - fromBlock: number, - toBlock: number, - sources: ContractSourceConfig[], - getEventHash: (name: string) => string - ): Promise<{ - logs: Log[]; - blocks: FetchedBlock[]; - }>; -}; diff --git a/src/providers/evm/provider.ts b/src/providers/evm/provider.ts index a8b5d17..b165625 100644 --- a/src/providers/evm/provider.ts +++ b/src/providers/evm/provider.ts @@ -12,8 +12,6 @@ import { stringToBytes } from 'viem'; import { getRangeHint } from './helpers'; -import { createPreloader } from './preloaders'; -import { FetchedBlock, Preloader } from './preloaders/types'; import { Block, CustomJsonRpcError, EventsData, Writer } from './types'; import { CheckpointRecord } from '../../stores/checkpoints'; import { ContractSourceConfig } from '../../types'; @@ -39,13 +37,10 @@ const MAX_BLOCKS_PER_REQUEST 
= 10000; export class EvmProvider extends BaseProvider { private readonly client: PublicClient; - private preloader?: Preloader; - private preloaderInitialized = false; private readonly writers: Record; private sourceHashes = new Map(); - private logsCache = new Map(); - private blockCache = new Map(); + protected logsCache = new Map(); constructor({ instance, @@ -70,6 +65,10 @@ export class EvmProvider extends BaseProvider { return addresses.map(address => getAddress(address)); } + protected async getChainId(): Promise { + return this.client.getChainId(); + } + async getNetworkIdentifier(): Promise { const chainId = await this.client.getChainId(); @@ -90,6 +89,12 @@ export class EvmProvider extends BaseProvider { return block.hash; } + protected async fetchBlock(blockNumber: number): Promise { + return this.client.getBlock({ + blockNumber: BigInt(blockNumber) + }); + } + async processBlock(blockNumber: number, parentHash: string | null) { let block: Block | null = null; let eventsData: EventsData; @@ -100,20 +105,7 @@ export class EvmProvider extends BaseProvider { try { if (!hasPreloadedBlockEvents) { - const cached = this.blockCache.get(blockNumber); - if (cached) { - this.blockCache.delete(blockNumber); - block = { - number: BigInt(cached.number), - hash: cached.hash, - parentHash: cached.parentHash, - timestamp: BigInt(cached.timestamp) - } as Block; - } else { - block = await this.client.getBlock({ - blockNumber: BigInt(blockNumber) - }); - } + block = await this.fetchBlock(blockNumber); } } catch (err) { this.log.error({ blockNumber, err }, 'getting block failed... 
retrying'); @@ -493,7 +485,7 @@ export class EvmProvider extends BaseProvider { return result; } - async getLogsForSources({ + protected async getLogsForSources({ fromBlock, toBlock, sources @@ -523,45 +515,15 @@ export class EvmProvider extends BaseProvider { return events; } - private async getPreloader(): Promise { - if (!this.preloaderInitialized) { - this.preloaderInitialized = true; - const chainId = await this.client.getChainId(); - this.preloader = createPreloader(this.instance.config, chainId, this.log); - } - return this.preloader; - } - async getCheckpointsRange( fromBlock: number, toBlock: number ): Promise { - const sources = this.instance.getCurrentSources(toBlock); - this.blockCache.clear(); - - const preloader = await this.getPreloader(); - - let events: Log[]; - - if (preloader) { - const result = await preloader.getCheckpointsRange( - fromBlock, - toBlock, - sources, - name => this.getEventHash(name) - ); - events = result.logs; - - for (const block of result.blocks) { - this.blockCache.set(block.number, block); - } - } else { - events = await this.getLogsForSources({ - fromBlock, - toBlock, - sources - }); - } + const events = await this.getLogsForSources({ + fromBlock, + toBlock, + sources: this.instance.getCurrentSources(toBlock) + }); for (const log of events) { if (log.blockNumber === null) continue; diff --git a/src/schemas.ts b/src/schemas.ts index 17a45fe..49d3922 100644 --- a/src/schemas.ts +++ b/src/schemas.ts @@ -26,8 +26,7 @@ export const checkpointConfigSchema = z.object({ global_events: z.array(contractEventConfigSchema).optional(), sources: z.array(contractSourceConfigSchema).optional(), templates: z.record(contractTemplateSchema).optional(), - abis: z.record(z.any()).optional(), - hypersync_api_token: z.string().optional() + abis: z.record(z.any()).optional() }); export const overridesConfigSchema = z.object({ From 2e8d349b3df1771b1ef7dd30bb4e5253c1691a2b Mon Sep 17 00:00:00 2001 From: wa0x6e 
<495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 17:38:50 +0800 Subject: [PATCH 21/34] style: rename hyper-sync files to match kebab-case convention --- .../evm/{hypersync-indexer.ts => hyper-sync-indexer.ts} | 2 +- .../evm/{hypersync-provider.ts => hyper-sync-provider.ts} | 0 src/providers/evm/index.ts | 4 ++-- 3 files changed, 3 insertions(+), 3 deletions(-) rename src/providers/evm/{hypersync-indexer.ts => hyper-sync-indexer.ts} (92%) rename src/providers/evm/{hypersync-provider.ts => hyper-sync-provider.ts} (100%) diff --git a/src/providers/evm/hypersync-indexer.ts b/src/providers/evm/hyper-sync-indexer.ts similarity index 92% rename from src/providers/evm/hypersync-indexer.ts rename to src/providers/evm/hyper-sync-indexer.ts index 03a3703..e64a87c 100644 --- a/src/providers/evm/hypersync-indexer.ts +++ b/src/providers/evm/hyper-sync-indexer.ts @@ -1,6 +1,6 @@ import { Logger } from '../../utils/logger'; import { BaseIndexer, Instance } from '../base'; -import { HyperSyncEvmProvider } from './hypersync-provider'; +import { HyperSyncEvmProvider } from './hyper-sync-provider'; import { Writer } from './types'; export class HyperSyncEvmIndexer extends BaseIndexer { diff --git a/src/providers/evm/hypersync-provider.ts b/src/providers/evm/hyper-sync-provider.ts similarity index 100% rename from src/providers/evm/hypersync-provider.ts rename to src/providers/evm/hyper-sync-provider.ts diff --git a/src/providers/evm/index.ts b/src/providers/evm/index.ts index bae71b1..dcf1e4d 100644 --- a/src/providers/evm/index.ts +++ b/src/providers/evm/index.ts @@ -1,5 +1,5 @@ export { EvmProvider } from './provider'; -export { HyperSyncEvmProvider } from './hypersync-provider'; +export { HyperSyncEvmProvider } from './hyper-sync-provider'; export { EvmIndexer } from './indexer'; -export { HyperSyncEvmIndexer } from './hypersync-indexer'; +export { HyperSyncEvmIndexer } from './hyper-sync-indexer'; export * from './types'; From eaf9fc226fbd1b3efae46a91504ddc4ba0c2cae0
Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 17:43:49 +0800 Subject: [PATCH 22/34] style: group methods by visibility (public, protected, private) --- src/providers/evm/provider.ts | 80 +++++++++++++++++------------------ 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/src/providers/evm/provider.ts b/src/providers/evm/provider.ts index b165625..625edd1 100644 --- a/src/providers/evm/provider.ts +++ b/src/providers/evm/provider.ts @@ -65,10 +65,6 @@ export class EvmProvider extends BaseProvider { return addresses.map(address => getAddress(address)); } - protected async getChainId(): Promise { - return this.client.getChainId(); - } - async getNetworkIdentifier(): Promise { const chainId = await this.client.getChainId(); @@ -89,12 +85,6 @@ export class EvmProvider extends BaseProvider { return block.hash; } - protected async fetchBlock(blockNumber: number): Promise { - return this.client.getBlock({ - blockNumber: BigInt(blockNumber) - }); - } - async processBlock(blockNumber: number, parentHash: string | null) { let block: Block | null = null; let eventsData: EventsData; @@ -485,36 +475,6 @@ export class EvmProvider extends BaseProvider { return result; } - protected async getLogsForSources({ - fromBlock, - toBlock, - sources - }: { - fromBlock: number; - toBlock: number; - sources: ContractSourceConfig[]; - }): Promise { - const chunks: ContractSourceConfig[][] = []; - for (let i = 0; i < sources.length; i += 20) { - chunks.push(sources.slice(i, i + 20)); - } - - let events: Log[] = []; - for (const chunk of chunks) { - const address = chunk.map(source => source.contract); - const topics = chunk.flatMap(source => - source.events.map(event => this.getEventHash(event.name)) - ); - - const chunkEvents = await this.getLogs(fromBlock, toBlock, address, [ - topics - ]); - events = events.concat(chunkEvents); - } - - return events; - } - async getCheckpointsRange( fromBlock: number, toBlock: number @@ 
-557,4 +517,44 @@ export class EvmProvider extends BaseProvider { this.log.info('new source added, clearing logs cache'); this.logsCache.clear(); } + + protected async getChainId(): Promise { + return this.client.getChainId(); + } + + protected async fetchBlock(blockNumber: number): Promise { + return this.client.getBlock({ + blockNumber: BigInt(blockNumber) + }); + } + + protected async getLogsForSources({ + fromBlock, + toBlock, + sources + }: { + fromBlock: number; + toBlock: number; + sources: ContractSourceConfig[]; + }): Promise { + const chunks: ContractSourceConfig[][] = []; + for (let i = 0; i < sources.length; i += 20) { + chunks.push(sources.slice(i, i + 20)); + } + + let events: Log[] = []; + for (const chunk of chunks) { + const address = chunk.map(source => source.contract); + const topics = chunk.flatMap(source => + source.events.map(event => this.getEventHash(event.name)) + ); + + const chunkEvents = await this.getLogs(fromBlock, toBlock, address, [ + topics + ]); + events = events.concat(chunkEvents); + } + + return events; + } } From 4b166b7c863621324714844e7905ce881b905f2c Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 17:46:58 +0800 Subject: [PATCH 23/34] refactor: use options object for HyperSyncEvmIndexer second argument --- src/providers/evm/hyper-sync-indexer.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/providers/evm/hyper-sync-indexer.ts b/src/providers/evm/hyper-sync-indexer.ts index e64a87c..de3a224 100644 --- a/src/providers/evm/hyper-sync-indexer.ts +++ b/src/providers/evm/hyper-sync-indexer.ts @@ -5,12 +5,12 @@ import { Writer } from './types'; export class HyperSyncEvmIndexer extends BaseIndexer { private writers: Record; - private apiToken: string; + private options: { apiToken: string }; - constructor(writers: Record, apiToken: string) { + constructor(writers: Record, options: { apiToken: string }) { super(); this.writers = writers; - 
this.apiToken = apiToken; + this.options = options; } init({ @@ -29,7 +29,7 @@ export class HyperSyncEvmIndexer extends BaseIndexer { log, abis, writers: this.writers, - apiToken: this.apiToken + apiToken: this.options.apiToken }); } From 6ca4d8604d7a1e19eb8c374df121e7722b69ad35 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 17:47:35 +0800 Subject: [PATCH 24/34] fix: throw when HyperSync API token is missing --- src/providers/evm/hyper-sync-provider.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/providers/evm/hyper-sync-provider.ts b/src/providers/evm/hyper-sync-provider.ts index 187cef8..bee75ab 100644 --- a/src/providers/evm/hyper-sync-provider.ts +++ b/src/providers/evm/hyper-sync-provider.ts @@ -67,6 +67,11 @@ export class HyperSyncEvmProvider extends EvmProvider { } ) { super(params); + + if (!params.apiToken) { + throw new Error('HyperSync API token is required'); + } + this.apiToken = params.apiToken; } From 06971bd86deb0c0345d16ff0169453b09efe431d Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 17:49:29 +0800 Subject: [PATCH 25/34] fix: move API token validation to indexer constructor --- src/providers/evm/hyper-sync-indexer.ts | 5 +++++ src/providers/evm/hyper-sync-provider.ts | 5 ----- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/providers/evm/hyper-sync-indexer.ts b/src/providers/evm/hyper-sync-indexer.ts index de3a224..2ed7b02 100644 --- a/src/providers/evm/hyper-sync-indexer.ts +++ b/src/providers/evm/hyper-sync-indexer.ts @@ -9,6 +9,11 @@ export class HyperSyncEvmIndexer extends BaseIndexer { constructor(writers: Record, options: { apiToken: string }) { super(); + + if (!options.apiToken) { + throw new Error('HyperSync API token is required'); + } + this.writers = writers; this.options = options; } diff --git a/src/providers/evm/hyper-sync-provider.ts b/src/providers/evm/hyper-sync-provider.ts index 
bee75ab..187cef8 100644 --- a/src/providers/evm/hyper-sync-provider.ts +++ b/src/providers/evm/hyper-sync-provider.ts @@ -67,11 +67,6 @@ export class HyperSyncEvmProvider extends EvmProvider { } ) { super(params); - - if (!params.apiToken) { - throw new Error('HyperSync API token is required'); - } - this.apiToken = params.apiToken; } From 9e86c82715fc8e9fe398462267c9b3362da5708b Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 17:51:47 +0800 Subject: [PATCH 26/34] style: group methods by visibility in HyperSyncEvmProvider --- src/providers/evm/hyper-sync-provider.ts | 30 ++++++++++++------------ 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/providers/evm/hyper-sync-provider.ts b/src/providers/evm/hyper-sync-provider.ts index 187cef8..bb08cb0 100644 --- a/src/providers/evm/hyper-sync-provider.ts +++ b/src/providers/evm/hyper-sync-provider.ts @@ -70,21 +70,6 @@ export class HyperSyncEvmProvider extends EvmProvider { this.apiToken = params.apiToken; } - protected async fetchBlock(blockNumber: number): Promise { - const cached = this.blockCache.get(blockNumber); - if (cached) { - this.blockCache.delete(blockNumber); - return { - number: BigInt(cached.number), - hash: cached.hash, - parentHash: cached.parentHash, - timestamp: BigInt(cached.timestamp) - } as Block; - } - - return super.fetchBlock(blockNumber); - } - async getCheckpointsRange( fromBlock: number, toBlock: number @@ -118,6 +103,21 @@ export class HyperSyncEvmProvider extends EvmProvider { })); } + protected async fetchBlock(blockNumber: number): Promise { + const cached = this.blockCache.get(blockNumber); + if (cached) { + this.blockCache.delete(blockNumber); + return { + number: BigInt(cached.number), + hash: cached.hash, + parentHash: cached.parentHash, + timestamp: BigInt(cached.timestamp) + } as Block; + } + + return super.fetchBlock(blockNumber); + } + private async queryCheckpointsRange( fromBlock: number, toBlock: number, From 
039d19e593a21a383059eb20cf75429c06aa8257 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 17:56:17 +0800 Subject: [PATCH 27/34] refactor: remove unnecessary source chunking from HyperSync queries The chunk-of-20 pattern was inherited from the RPC provider where eth_getLogs limits the number of addresses per call. HyperSync has no such constraint, so all sources are now queried in a single request. --- src/providers/evm/hyper-sync-provider.ts | 113 +++++++++++------------ 1 file changed, 55 insertions(+), 58 deletions(-) diff --git a/src/providers/evm/hyper-sync-provider.ts b/src/providers/evm/hyper-sync-provider.ts index bb08cb0..ab8b668 100644 --- a/src/providers/evm/hyper-sync-provider.ts +++ b/src/providers/evm/hyper-sync-provider.ts @@ -3,7 +3,6 @@ import { EvmProvider } from './provider'; import { Block } from './types'; import { CheckpointRecord } from '../../stores/checkpoints'; import { ContractSourceConfig } from '../../types'; -import { chunk } from '../../utils/helpers'; type FetchedBlock = { number: number; @@ -126,67 +125,65 @@ export class HyperSyncEvmProvider extends EvmProvider { const allLogs: Log[] = []; const allBlocks: FetchedBlock[] = []; - for (const sourceChunk of chunk(sources, 20)) { - const addresses = sourceChunk.map(source => source.contract); - const topic0 = sourceChunk.flatMap(source => - source.events.map(event => this.getEventHash(event.name)) - ); - - let currentFrom = fromBlock; - const exclusiveTo = toBlock + 1; - - while (currentFrom < exclusiveTo) { - const response = await this.query({ - from_block: currentFrom, - to_block: exclusiveTo, - logs: [{ address: addresses, topics: [topic0] }], - field_selection: FIELD_SELECTION - }); - - // NOTE: do not replace for/push with spread — spread causes stack overflow on large arrays - for (const block of response.data.blocks) { - if ( - block.number != null && - block.timestamp != null && - block.hash && - block.parent_hash - ) { - 
allBlocks.push({ - number: block.number, - hash: block.hash, - parentHash: block.parent_hash, - timestamp: block.timestamp - }); - } - } + const addresses = sources.map(source => source.contract); + const topic0 = sources.flatMap(source => + source.events.map(event => this.getEventHash(event.name)) + ); - for (const log of response.data.logs) { - const topics = [ - log.topic0, - log.topic1, - log.topic2, - log.topic3 - ].filter((t): t is string => !!t) as `0x${string}`[]; - - allLogs.push({ - address: (log.address ?? '0x') as `0x${string}`, - blockHash: (log.block_hash ?? null) as `0x${string}` | null, - blockNumber: - log.block_number != null ? BigInt(log.block_number) : null, - data: (log.data ?? '0x') as `0x${string}`, - logIndex: log.log_index ?? 0, - transactionHash: (log.transaction_hash ?? null) as - | `0x${string}` - | null, - transactionIndex: log.transaction_index ?? 0, - removed: log.removed ?? false, - topics - } as Log); + let currentFrom = fromBlock; + const exclusiveTo = toBlock + 1; + + while (currentFrom < exclusiveTo) { + const response = await this.query({ + from_block: currentFrom, + to_block: exclusiveTo, + logs: [{ address: addresses, topics: [topic0] }], + field_selection: FIELD_SELECTION + }); + + // NOTE: do not replace for/push with spread — spread causes stack overflow on large arrays + for (const block of response.data.blocks) { + if ( + block.number != null && + block.timestamp != null && + block.hash && + block.parent_hash + ) { + allBlocks.push({ + number: block.number, + hash: block.hash, + parentHash: block.parent_hash, + timestamp: block.timestamp + }); } + } - if (response.next_block >= exclusiveTo) break; - currentFrom = response.next_block; + for (const log of response.data.logs) { + const topics = [ + log.topic0, + log.topic1, + log.topic2, + log.topic3 + ].filter((t): t is string => !!t) as `0x${string}`[]; + + allLogs.push({ + address: (log.address ?? '0x') as `0x${string}`, + blockHash: (log.block_hash ?? 
null) as `0x${string}` | null, + blockNumber: + log.block_number != null ? BigInt(log.block_number) : null, + data: (log.data ?? '0x') as `0x${string}`, + logIndex: log.log_index ?? 0, + transactionHash: (log.transaction_hash ?? null) as + | `0x${string}` + | null, + transactionIndex: log.transaction_index ?? 0, + removed: log.removed ?? false, + topics + } as Log); } + + if (response.next_block >= exclusiveTo) break; + currentFrom = response.next_block; } return { logs: allLogs, blocks: allBlocks }; From b5b96d4587dac40a88a4306896c40a70b6f8ab6f Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 18:00:25 +0800 Subject: [PATCH 28/34] style: use camelCase for HyperSync references --- src/providers/evm/hyper-sync-provider.ts | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/providers/evm/hyper-sync-provider.ts b/src/providers/evm/hyper-sync-provider.ts index ab8b668..9b1c653 100644 --- a/src/providers/evm/hyper-sync-provider.ts +++ b/src/providers/evm/hyper-sync-provider.ts @@ -11,7 +11,7 @@ type FetchedBlock = { timestamp: number; }; -type HypersyncResponse = { +type HyperSyncResponse = { next_block: number; data: { blocks: { @@ -57,7 +57,7 @@ const FIELD_SELECTION = { export class HyperSyncEvmProvider extends EvmProvider { private readonly apiToken: string; - private hypersyncUrl?: string; + private hyperSyncUrl?: string; private blockCache = new Map(); constructor( @@ -189,18 +189,18 @@ export class HyperSyncEvmProvider extends EvmProvider { return { logs: allLogs, blocks: allBlocks }; } - private async getHypersyncUrl(): Promise { - if (!this.hypersyncUrl) { + private async getHyperSyncUrl(): Promise { + if (!this.hyperSyncUrl) { const chainId = await this.getChainId(); - this.hypersyncUrl = `https://${chainId}.hypersync.xyz`; + this.hyperSyncUrl = `https://${chainId}.hypersync.xyz`; } - return this.hypersyncUrl; + return this.hyperSyncUrl; } private async query( body: Record - 
): Promise { - const url = await this.getHypersyncUrl(); + ): Promise { + const url = await this.getHyperSyncUrl(); const res = await fetch(`${url}/query`, { method: 'POST', From 471799f7e7bc1621e8091f18d2729cceaf3f5c33 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 18:15:14 +0800 Subject: [PATCH 29/34] refactor: make HyperSync response types non-optional Core fields (number, timestamp, hash, address, etc.) are guaranteed by the HyperSync API when requested via field_selection. Removing defensive null checks so malformed responses surface as errors instead of being silently skipped. --- src/providers/evm/hyper-sync-provider.ts | 70 ++++++++++-------------- 1 file changed, 30 insertions(+), 40 deletions(-) diff --git a/src/providers/evm/hyper-sync-provider.ts b/src/providers/evm/hyper-sync-provider.ts index 9b1c653..6bfbcbe 100644 --- a/src/providers/evm/hyper-sync-provider.ts +++ b/src/providers/evm/hyper-sync-provider.ts @@ -15,24 +15,24 @@ type HyperSyncResponse = { next_block: number; data: { blocks: { - number?: number; - timestamp?: number; - hash?: string; - parent_hash?: string; + number: number; + timestamp: number; + hash: string; + parent_hash: string; }[]; logs: { - block_number?: number; - log_index?: number; - transaction_index?: number; - transaction_hash?: string; - block_hash?: string; - address?: string; - data?: string; - topic0?: string | null; - topic1?: string | null; - topic2?: string | null; - topic3?: string | null; - removed?: boolean; + block_number: number; + log_index: number; + transaction_index: number; + transaction_hash: string; + block_hash: string; + address: string; + data: string; + topic0: string | null; + topic1: string | null; + topic2: string | null; + topic3: string | null; + removed: boolean; }[]; }; }; @@ -143,19 +143,12 @@ export class HyperSyncEvmProvider extends EvmProvider { // NOTE: do not replace for/push with spread — spread causes stack overflow on large arrays 
for (const block of response.data.blocks) { - if ( - block.number != null && - block.timestamp != null && - block.hash && - block.parent_hash - ) { - allBlocks.push({ - number: block.number, - hash: block.hash, - parentHash: block.parent_hash, - timestamp: block.timestamp - }); - } + allBlocks.push({ + number: block.number, + hash: block.hash, + parentHash: block.parent_hash, + timestamp: block.timestamp + }); } for (const log of response.data.logs) { @@ -167,17 +160,14 @@ export class HyperSyncEvmProvider extends EvmProvider { ].filter((t): t is string => !!t) as `0x${string}`[]; allLogs.push({ - address: (log.address ?? '0x') as `0x${string}`, - blockHash: (log.block_hash ?? null) as `0x${string}` | null, - blockNumber: - log.block_number != null ? BigInt(log.block_number) : null, - data: (log.data ?? '0x') as `0x${string}`, - logIndex: log.log_index ?? 0, - transactionHash: (log.transaction_hash ?? null) as - | `0x${string}` - | null, - transactionIndex: log.transaction_index ?? 0, - removed: log.removed ?? 
false, + address: log.address as `0x${string}`, + blockHash: log.block_hash as `0x${string}`, + blockNumber: BigInt(log.block_number), + data: log.data as `0x${string}`, + logIndex: log.log_index, + transactionHash: log.transaction_hash as `0x${string}`, + transactionIndex: log.transaction_index, + removed: log.removed, topics } as Log); } From 32479a14024945c9ae819df0ea007aa883270f6b Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 18:34:38 +0800 Subject: [PATCH 30/34] chore: fix export ordering --- src/providers/evm/index.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/providers/evm/index.ts b/src/providers/evm/index.ts index dcf1e4d..f714bf4 100644 --- a/src/providers/evm/index.ts +++ b/src/providers/evm/index.ts @@ -1,5 +1,5 @@ -export { EvmProvider } from './provider'; -export { HyperSyncEvmProvider } from './hyper-sync-provider'; export { EvmIndexer } from './indexer'; +export { EvmProvider } from './provider'; export { HyperSyncEvmIndexer } from './hyper-sync-indexer'; +export { HyperSyncEvmProvider } from './hyper-sync-provider'; export * from './types'; From d4a24b6d3aac9ce5f11ebee58acb29a304fe8ddf Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 19:02:40 +0800 Subject: [PATCH 31/34] fix: handle undefined blocks/logs in HyperSync response HyperSync API returns undefined instead of empty arrays when there are no matching results, causing "response.data.blocks is not iterable". 
--- src/providers/evm/hyper-sync-provider.ts | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/providers/evm/hyper-sync-provider.ts b/src/providers/evm/hyper-sync-provider.ts index 6bfbcbe..ca68366 100644 --- a/src/providers/evm/hyper-sync-provider.ts +++ b/src/providers/evm/hyper-sync-provider.ts @@ -14,13 +14,13 @@ type FetchedBlock = { type HyperSyncResponse = { next_block: number; data: { - blocks: { + blocks?: { number: number; timestamp: number; hash: string; parent_hash: string; }[]; - logs: { + logs?: { block_number: number; log_index: number; transaction_index: number; @@ -142,7 +142,7 @@ export class HyperSyncEvmProvider extends EvmProvider { }); // NOTE: do not replace for/push with spread — spread causes stack overflow on large arrays - for (const block of response.data.blocks) { + for (const block of response.data.blocks ?? []) { allBlocks.push({ number: block.number, hash: block.hash, @@ -151,13 +151,10 @@ export class HyperSyncEvmProvider extends EvmProvider { }); } - for (const log of response.data.logs) { - const topics = [ - log.topic0, - log.topic1, - log.topic2, - log.topic3 - ].filter((t): t is string => !!t) as `0x${string}`[]; + for (const log of response.data.logs ?? []) { + const topics = [log.topic0, log.topic1, log.topic2, log.topic3].filter( + (t): t is string => !!t + ) as `0x${string}`[]; allLogs.push({ address: log.address as `0x${string}`, From d1d4254b09cc5a1c898c3057d28ee442a28bdd44 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Sat, 11 Apr 2026 22:46:56 +0800 Subject: [PATCH 32/34] fix: correct HyperSync response parsing and use full preload range HyperSync API returns `data` as an array of `{ blocks, logs }` chunks, not a single object. The old code accessed `response.data.blocks` which was undefined on the array, causing all events to be silently skipped. 
Also allows HyperSync to preload the full block range in one call instead of using the small adaptive step designed for RPC providers. --- src/container.ts | 25 +++--- src/providers/base.ts | 4 + src/providers/evm/hyper-sync-provider.ts | 107 +++++++++++++---------- 3 files changed, 77 insertions(+), 59 deletions(-) diff --git a/src/container.ts b/src/container.ts index a663c30..57b50ab 100644 --- a/src/container.ts +++ b/src/container.ts @@ -239,13 +239,12 @@ export class Container implements Instance { if (this.preloadedBlocks.length > 0) return this.preloadedBlocks.shift() as number; + const providerRange = this.indexer.getProvider().getPreloadRange(); let currentBlock = blockNum; while (currentBlock <= this.preloadEndBlock) { - const endBlock = Math.min( - currentBlock + this.preloadStep, - this.preloadEndBlock - ); + const step = providerRange ?? this.preloadStep; + const endBlock = Math.min(currentBlock + step, this.preloadEndBlock); let checkpoints: CheckpointRecord[]; try { this.log.info( @@ -264,14 +263,16 @@ export class Container implements Instance { continue; } - const increase = - checkpoints.length > BLOCK_PRELOAD_TARGET - ? -BLOCK_PRELOAD_STEP - : +BLOCK_PRELOAD_STEP; - this.preloadStep = Math.max( - BLOCK_RELOAD_MIN_RANGE, - this.preloadStep + increase - ); + if (!providerRange) { + const increase = + checkpoints.length > BLOCK_PRELOAD_TARGET + ? 
-BLOCK_PRELOAD_STEP + : +BLOCK_PRELOAD_STEP; + this.preloadStep = Math.max( + BLOCK_RELOAD_MIN_RANGE, + this.preloadStep + increase + ); + } if (checkpoints.length > 0) { this.preloadedBlocks = [ diff --git a/src/providers/base.ts b/src/providers/base.ts index 0f47128..863a253 100644 --- a/src/providers/base.ts +++ b/src/providers/base.ts @@ -90,6 +90,10 @@ export class BaseProvider { ); } + getPreloadRange(): number | null { + return null; + } + async getCheckpointsRange( fromBlock: number, toBlock: number diff --git a/src/providers/evm/hyper-sync-provider.ts b/src/providers/evm/hyper-sync-provider.ts index ca68366..c47f01b 100644 --- a/src/providers/evm/hyper-sync-provider.ts +++ b/src/providers/evm/hyper-sync-provider.ts @@ -11,30 +11,34 @@ type FetchedBlock = { timestamp: number; }; +type HyperSyncBlock = { + number: number; + timestamp: number; + hash: string; + parent_hash: string; +}; + +type HyperSyncLog = { + block_number: number; + log_index: number; + transaction_index: number; + transaction_hash: string; + block_hash: string; + address: string; + data: string; + topic0: string | null; + topic1: string | null; + topic2: string | null; + topic3: string | null; + removed: boolean; +}; + type HyperSyncResponse = { next_block: number; data: { - blocks?: { - number: number; - timestamp: number; - hash: string; - parent_hash: string; - }[]; - logs?: { - block_number: number; - log_index: number; - transaction_index: number; - transaction_hash: string; - block_hash: string; - address: string; - data: string; - topic0: string | null; - topic1: string | null; - topic2: string | null; - topic3: string | null; - removed: boolean; - }[]; - }; + blocks?: HyperSyncBlock[]; + logs?: HyperSyncLog[]; + }[]; }; const FIELD_SELECTION = { @@ -69,6 +73,10 @@ export class HyperSyncEvmProvider extends EvmProvider { this.apiToken = params.apiToken; } + getPreloadRange(): number { + return Infinity; + } + async getCheckpointsRange( fromBlock: number, toBlock: number @@ -141,32 
+149,37 @@ export class HyperSyncEvmProvider extends EvmProvider { field_selection: FIELD_SELECTION }); - // NOTE: do not replace for/push with spread — spread causes stack overflow on large arrays - for (const block of response.data.blocks ?? []) { - allBlocks.push({ - number: block.number, - hash: block.hash, - parentHash: block.parent_hash, - timestamp: block.timestamp - }); - } - - for (const log of response.data.logs ?? []) { - const topics = [log.topic0, log.topic1, log.topic2, log.topic3].filter( - (t): t is string => !!t - ) as `0x${string}`[]; - - allLogs.push({ - address: log.address as `0x${string}`, - blockHash: log.block_hash as `0x${string}`, - blockNumber: BigInt(log.block_number), - data: log.data as `0x${string}`, - logIndex: log.log_index, - transactionHash: log.transaction_hash as `0x${string}`, - transactionIndex: log.transaction_index, - removed: log.removed, - topics - } as Log); + for (const chunk of response.data) { + // NOTE: do not replace for/push with spread — spread causes stack overflow on large arrays + for (const block of chunk.blocks ?? []) { + allBlocks.push({ + number: block.number, + hash: block.hash, + parentHash: block.parent_hash, + timestamp: block.timestamp + }); + } + + for (const log of chunk.logs ?? 
[]) { + const topics = [ + log.topic0, + log.topic1, + log.topic2, + log.topic3 + ].filter((t): t is string => !!t) as `0x${string}`[]; + + allLogs.push({ + address: log.address as `0x${string}`, + blockHash: log.block_hash as `0x${string}`, + blockNumber: BigInt(log.block_number), + data: log.data as `0x${string}`, + logIndex: log.log_index, + transactionHash: log.transaction_hash as `0x${string}`, + transactionIndex: log.transaction_index, + removed: log.removed, + topics + } as Log); + } } if (response.next_block >= exclusiveTo) break; From d56e9dce8332cc51b4bc1b49e75d18c4668dd6d1 Mon Sep 17 00:00:00 2001 From: wa0x6e <495709+wa0x6e@users.noreply.github.com> Date: Mon, 13 Apr 2026 22:00:36 +0800 Subject: [PATCH 33/34] style: rename hyper-sync to hypersync in filenames and imports --- .../evm/{hyper-sync-indexer.ts => hypersync-indexer.ts} | 2 +- .../evm/{hyper-sync-provider.ts => hypersync-provider.ts} | 0 src/providers/evm/index.ts | 4 ++-- 3 files changed, 3 insertions(+), 3 deletions(-) rename src/providers/evm/{hyper-sync-indexer.ts => hypersync-indexer.ts} (93%) rename src/providers/evm/{hyper-sync-provider.ts => hypersync-provider.ts} (100%) diff --git a/src/providers/evm/hyper-sync-indexer.ts b/src/providers/evm/hypersync-indexer.ts similarity index 93% rename from src/providers/evm/hyper-sync-indexer.ts rename to src/providers/evm/hypersync-indexer.ts index 2ed7b02..e95a8f6 100644 --- a/src/providers/evm/hyper-sync-indexer.ts +++ b/src/providers/evm/hypersync-indexer.ts @@ -1,6 +1,6 @@ import { Logger } from '../../utils/logger'; import { BaseIndexer, Instance } from '../base'; -import { HyperSyncEvmProvider } from './hyper-sync-provider'; +import { HyperSyncEvmProvider } from './hypersync-provider'; import { Writer } from './types'; export class HyperSyncEvmIndexer extends BaseIndexer { diff --git a/src/providers/evm/hyper-sync-provider.ts b/src/providers/evm/hypersync-provider.ts similarity index 100% rename from 
src/providers/evm/hyper-sync-provider.ts rename to src/providers/evm/hypersync-provider.ts diff --git a/src/providers/evm/index.ts b/src/providers/evm/index.ts index f714bf4..8f301c7 100644 --- a/src/providers/evm/index.ts +++ b/src/providers/evm/index.ts @@ -1,5 +1,5 @@ export { EvmIndexer } from './indexer'; export { EvmProvider } from './provider'; -export { HyperSyncEvmIndexer } from './hyper-sync-indexer'; -export { HyperSyncEvmProvider } from './hyper-sync-provider'; +export { HyperSyncEvmIndexer } from './hypersync-indexer'; +export { HyperSyncEvmProvider } from './hypersync-provider'; export * from './types'; From 35786094a6f54585ca1aa0e03d2891ce94dde90e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wiktor=20Tkaczy=C5=84ski?= Date: Mon, 13 Apr 2026 17:13:21 +0200 Subject: [PATCH 34/34] chore: bump version --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 7f19b77..b6e69ac 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@snapshot-labs/checkpoint", - "version": "0.1.0-beta.68", + "version": "0.1.0-beta.69", "license": "MIT", "bin": { "checkpoint": "dist/src/bin/index.js"