diff --git a/CHANGELOG.md b/CHANGELOG.md index 2b0a7c7..c42de19 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ ## Unreleased +- added: Added flexible service key matching with URL templating. +- added: Added support for WebSocket transports for evmRpc plugin. +- changed: Migrated to JSON-based logs using Pino library. +- changed: Use serviceKeys for nownodes API key. +- removed: Removed plugins to reduce load: abstract, amoy, arbitrum, avalanche, base, bobevm, celo, ethereumclassic, ethereumpow, fantom, filecoinfevm, filecoinfevmcalibration, holesky, hyperevm, pulsechain, rsk, sepolia, sonic. + ## 0.2.6 (2025-12-10) ## 0.2.5 (2025-12-04) diff --git a/docs/logging.md b/docs/logging.md new file mode 100644 index 0000000..f55b15b --- /dev/null +++ b/docs/logging.md @@ -0,0 +1,75 @@ +# Change Server Logging + +## Implementation + +Uses Pino with a base logger and child loggers for scoping. Located in `src/util/logger.ts`. + +### API + +```typescript +import { logger, makeLogger } from './util/logger' + +// Direct use of base logger for simple cases: +logger.info({ port: 3000 }, 'server started') + +// For code plugins (blockbook, evmRpc): +const pluginLogger = makeLogger('blockbook', 'ethereum') // scope, chainPluginId + +// For non-plugin code: +const socketLogger = makeLogger('socket') // scope only + +// Returns a pino.Logger - use standard Pino API: +pluginLogger.info('message') +pluginLogger.warn('message') +pluginLogger.error('message') + +// Pass an object with extra fields: +pluginLogger.info({ ip: '1.2.3.4' }, 'connected') +pluginLogger.info({ blockNum: '12345' }, 'block') +``` + +### Output Format + +JSON with these fields: + +- `level` - numeric level (30=info, 40=warn, 50=error) +- `time` - numeric epoch milliseconds +- `scope` - scope identifier (code plugin name: "blockbook", "evmRpc", "socket", "server") +- `chainPluginId` - chain plugin ID (optional, e.g., "ethereum", "arbitrum") +- `pid` - process ID (for socket events) +- `sid` - socket ID (for socket events, identifies the connection) +- `msg` - text message (Pino default key) +- Additional fields from passed objects + +Example: + +```json +{"level":30,"time":1735654281000,"scope":"blockbook","chainPluginId":"bitcoin","blockNum":"876543","msg":"block"} +{"level":30,"time":1735654281000,"scope":"socket","pid":20812,"sid":396,"ip":"192.168.1.1","msg":"connected"} +{"level":50,"time":1735654281000,"scope":"evmRpc","chainPluginId":"ethereum","msg":"watchBlocks error: connection timeout"} +``` + +### pino-pretty Support + +Logs are compatible with pino-pretty for human-readable output during development: + +```bash +node src/index.js | pino-pretty --translateTime +# Or use -t shorthand: +node src/index.js | pino-pretty -t +``` + +## Logged Events + +1. **WebSocket connection established** - scope: `socket`, includes pid, sid, IP +2. **Addresses subscribed** - scope: `socket`, includes pid, sid, IP, first 6 chars of addresses, pluginId, checkpoint +3. **Block found** - scope: code plugin, includes `chainPluginId`, block number +4. **Transaction detected** - scope: code plugin, includes `chainPluginId`, first 6 chars of address and txid +5. 
**Update sent** - scope: `socket`, includes pid, sid, IP, pluginId, address (6 chars), checkpoint + +## Notes + +- All output goes to stdout for logrotate compatibility +- Uses Pino's `.child()` pattern for efficient scoped logging +- Logging is disabled when `NODE_ENV=test` +- Reserved fields (`time`, `scope`) passed in log objects are automatically renamed with a `_` suffix to avoid conflicts (e.g., `time` becomes `time_`) diff --git a/jest.setup.ts b/jest.setup.ts index 14f3d82..25c3b77 100644 --- a/jest.setup.ts +++ b/jest.setup.ts @@ -7,5 +7,7 @@ jest.mock('./src/serverConfig', () => ({ 'api.etherscan.io': ['JYMB141VYKJ2KPVMYJUZC8PXGWKUFVFX8N'], 'eth.blockscout.com': [] } - } + }, + // Pass through URL unchanged in tests (no replacements) + replaceUrlParams: (url: string) => url })) diff --git a/package.json b/package.json index ff6e7b6..63b7c34 100644 --- a/package.json +++ b/package.json @@ -37,6 +37,7 @@ "clipanion": "^3.2.0-rc.3", "edge-server-tools": "^0.2.19", "node-fetch": "^2.6.0", + "pino": "^10.2.0", "prom-client": "^15.1.0", "serverlet": "^0.1.1", "viem": "^2.27.0", diff --git a/src/hub.ts b/src/hub.ts index 1dfb130..96ef6de 100644 --- a/src/hub.ts +++ b/src/hub.ts @@ -2,13 +2,14 @@ import { Counter, Gauge } from 'prom-client' import WebSocket from 'ws' import { messageToString } from './messageToString' -import { Logger } from './types' import { AddressPlugin } from './types/addressPlugin' import { changeProtocol, SubscribeParams, SubscribeResult } from './types/changeProtocol' +import { getAddressPrefix } from './util/addressUtils' +import { makeLogger } from './util/logger' import { stackify } from './util/stackify' const pluginGauge = new Gauge({ @@ -35,13 +36,12 @@ const eventCounter = new Counter({ }) export interface AddressHub { - handleConnection: (ws: WebSocket) => void + handleConnection: (ws: WebSocket, ip: string) => void destroy: () => void } export interface AddressHubOpts { plugins: AddressPlugin[] - logger?: Logger } interface PluginRow { @@ -58,13 +58,15 @@ interface PluginRow { */ export function makeAddressHub(opts: AddressHubOpts): AddressHub { const { plugins } = opts + const logger = makeLogger('socket') let nextSocketId = 0 - // Maps socketId to changeProtocol server codec: - const codecMap = new Map< - number, - ReturnType - >() + // Maps socketId to codec and IP info: + interface SocketInfo { + codec: ReturnType + ip: string + } + const codecMap = new Map() // Maps pluginId to PluginRow: const pluginMap = new Map() @@ -82,9 +84,18 @@ export function makeAddressHub(opts: AddressHubOpts): AddressHub { const socketIds = pluginRow.addressSubscriptions.get(address) if (socketIds == null) return for (const socketId of socketIds) { - const codec = codecMap.get(socketId) - if (codec == null) continue - console.log(`WebSocket ${process.pid}.${socketId} update`) + const socketInfo = codecMap.get(socketId) + if (socketInfo == null) continue + const { codec, ip } = socketInfo + logger.info({ + pid: process.pid, + sid: socketId, + ip, + pluginId, + addr: getAddressPrefix(address), + checkpoint, + msg: 'update' + }) codec.remoteMethods.update([pluginId, address, checkpoint]) } }) @@ -95,8 +106,17 @@ export function makeAddressHub(opts: AddressHubOpts): AddressHub { const socketIds = pluginRow.addressSubscriptions.get(address) if (socketIds == null) continue for (const socketId of socketIds) { - const codec = codecMap.get(socketId) - if (codec == null) continue + const socketInfo = codecMap.get(socketId) + if (socketInfo == null) continue + const { codec, ip } = 
socketInfo + logger.info({ + pid: process.pid, + sid: socketId, + ip, + pluginId, + addr: getAddressPrefix(address), + msg: 'subLost' + }) codec.remoteMethods.subLost([pluginId, address]) } pluginRow.addressSubscriptions.delete(address) @@ -148,28 +168,17 @@ export function makeAddressHub(opts: AddressHubOpts): AddressHub { } return { - handleConnection(ws: WebSocket): void { + handleConnection(ws: WebSocket, ip: string): void { const socketId = nextSocketId++ - const logPrefix = `WebSocket ${process.pid}.${socketId} ` - const logger: Logger = { - log: (...args: unknown[]): void => { - opts.logger?.log(logPrefix, ...args) - }, - error: (...args: unknown[]): void => { - opts.logger?.error(logPrefix, ...args) - }, - warn: (...args: unknown[]): void => { - opts.logger?.warn(logPrefix, ...args) - } - } - connectionGauge.inc() - logger.log('connected') + const pid = process.pid + const sid = socketId + logger.info({ pid, sid, ip, msg: 'connected' }) const codec = changeProtocol.makeServerCodec({ handleError(error) { - logger.error(`send error: ${String(error)}`) + logger.error({ pid, sid, ip, msg: `send error: ${String(error)}` }) ws.close(1011, 'Internal error') }, @@ -181,7 +190,13 @@ export function makeAddressHub(opts: AddressHubOpts): AddressHub { async subscribe( params: SubscribeParams[] ): Promise { - logger.log(`subscribing ${params.length}`) + // Log all subscriptions in one line + const subs = params.map(([pluginId, address, checkpoint]) => ({ + pluginId, + addr: getAddressPrefix(address), + checkpoint + })) + logger.info({ pid, sid, ip, subs, msg: 'subscribe' }) // Do the initial scan: const result = await Promise.all( @@ -205,7 +220,12 @@ export function makeAddressHub(opts: AddressHubOpts): AddressHub { const changed = await pluginRow.plugin .scanAddress(address, checkpoint) .catch(error => { - logger.warn('Scan address failed: ' + stackify(error)) + logger.warn({ + pid, + sid, + ip, + msg: 'Scan address failed: ' + stackify(error) + }) return true }) return changed ? 
2 : 1 @@ -213,13 +233,17 @@ export function makeAddressHub(opts: AddressHubOpts): AddressHub { ) ) - logger.log(`subscribed ${params.length}`) - return result }, async unsubscribe(params: SubscribeParams[]): Promise { - logger.log(`unsubscribed ${params.length}`) + logger.info({ + pid, + sid, + ip, + count: params.length, + msg: 'unsubscribe' + }) for (const param of params) { const [pluginId, address] = param @@ -241,11 +265,21 @@ export function makeAddressHub(opts: AddressHubOpts): AddressHub { } }) - // Save the codec for notifications: - codecMap.set(socketId, codec) + // Save the codec and IP for notifications: + codecMap.set(socketId, { codec, ip }) ws.on('close', () => { - logger.log(`closed`) + // Collect subscriptions for logging before cleanup: + const subs: Array<{ pluginId: string; addr: string }> = [] + for (const [pluginId, pluginRow] of pluginMap.entries()) { + for (const [address, socketIds] of pluginRow.addressSubscriptions) { + if (socketIds.has(socketId)) { + subs.push({ pluginId, addr: getAddressPrefix(address) }) + } + } + } + + logger.info({ pid, sid, ip, subs, msg: 'closed' }) connectionGauge.dec() // Cleanup the server codec: @@ -264,7 +298,12 @@ export function makeAddressHub(opts: AddressHubOpts): AddressHub { if (socketIds.size < 1) { unsubscribeClientsToPluginAddress(pluginRow, address).catch( error => { - console.error('unsubscribe error:', error) + logger.error({ + pid, + sid, + ip, + msg: `unsubscribe error: ${String(error)}` + }) } ) } @@ -273,7 +312,12 @@ export function makeAddressHub(opts: AddressHubOpts): AddressHub { }) ws.on('error', error => { - logger.error(`connection error: ${String(error)}`) + logger.error({ + pid, + sid, + ip, + msg: `connection error: ${String(error)}` + }) }) ws.on('message', message => { diff --git a/src/index.ts b/src/index.ts index 36ef5fc..e601450 100644 --- a/src/index.ts +++ b/src/index.ts @@ -5,6 +5,10 @@ import WebSocket from 'ws' import { makeAddressHub } from './hub' import { serverConfig } from './serverConfig' +import { makeAlchemyWebhookHandler } from './util/alchemyWebhookHandler' +import { makeLogger } from './util/logger' + +const logger = makeLogger('server') const aggregatorRegistry = new AggregatorRegistry() @@ -24,7 +28,7 @@ function manageServers(): void { // Restart workers when they exit: cluster.on('exit', (worker, code, signal) => { const { pid = '?' 
} = worker.process - console.log(`Worker ${pid} died with code ${code} and signal ${signal}`) + logger.info({ pid, code, signal, msg: 'worker died' }) cluster.fork() }) @@ -44,29 +48,46 @@ function manageServers(): void { }) }) httpServer.listen(metricsPort, metricsHost) - console.log(`Metrics server listening on port ${metricsPort}`) + logger.info({ port: metricsPort, msg: 'metrics server listening' }) } async function server(): Promise { - const { allPlugins } = await import('./plugins/allPlugins') + const { makeAllPlugins } = await import('./plugins/allPlugins') const { listenPort, listenHost } = serverConfig + // Create and start the Alchemy webhook handler + const webhookHandler = makeAlchemyWebhookHandler() + webhookHandler.start() + + // Create all plugins, passing the webhook handler for Alchemy plugins + const allPlugins = makeAllPlugins(webhookHandler) + const wss = new WebSocket.Server({ port: listenPort, host: listenHost }) - console.log(`WebSocket server listening on port ${listenPort}`) - - const hub = makeAddressHub({ plugins: allPlugins, logger: console }) - wss.on('connection', ws => hub.handleConnection(ws)) + logger.info({ port: listenPort, msg: 'websocket server listening' }) + + const hub = makeAddressHub({ plugins: allPlugins }) + wss.on('connection', (ws, req) => { + // Extract IP from X-Forwarded-For header (if behind proxy) or socket + const forwardedFor = req.headers['x-forwarded-for'] + const ip = + (typeof forwardedFor === 'string' + ? forwardedFor.split(',')[0].trim() + : undefined) ?? + req.socket.remoteAddress ?? + 'unknown' + hub.handleConnection(ws, ip) + }) // Graceful shutdown handler const shutdown = (): void => { - console.log(`Worker ${process.pid} shutting down...`) + logger.info({ pid: process.pid, msg: 'shutting down' }) // Stop accepting new connections wss.close(() => { - console.log(`Worker ${process.pid} WebSocket server closed`) + logger.info({ pid: process.pid, msg: 'websocket server closed' }) }) // Close all existing client connections @@ -77,7 +98,10 @@ async function server(): Promise { // Clean up plugin resources (timers, WebSocket connections, etc.) hub.destroy() - console.log(`Worker ${process.pid} cleanup complete`) + // Stop the webhook server + webhookHandler.stop() + + logger.info({ pid: process.pid, msg: 'cleanup complete' }) process.exit(0) } @@ -86,6 +110,6 @@ async function server(): Promise { } main().catch(error => { - console.error(error) + logger.error({ err: error }, 'main error') process.exit(1) }) diff --git a/src/plugins/alchemy.ts b/src/plugins/alchemy.ts new file mode 100644 index 0000000..31ea10f --- /dev/null +++ b/src/plugins/alchemy.ts @@ -0,0 +1,249 @@ +import { makeEvents } from 'yavent' + +import { serverConfig } from '../serverConfig' +import { AddressPlugin, PluginEvents } from '../types/addressPlugin' +import { + AlchemyNetwork, + AlchemyNotifyApi, + makeAlchemyNotifyApi +} from '../util/alchemyNotifyApi' +import { + AlchemyActivity, + AlchemyWebhookHandler +} from '../util/alchemyWebhookHandler' +import { Logger, makeLogger } from '../util/logger' + +export interface AlchemyOptions { + pluginId: string + network: AlchemyNetwork + webhookHandler: AlchemyWebhookHandler +} + +/** + * Creates an Alchemy Address Activity webhook plugin. + * + * This plugin uses Alchemy's webhook infrastructure to receive real-time + * notifications when tracked addresses have on-chain activity. 
+ */ +export function makeAlchemy(opts: AlchemyOptions): AddressPlugin { + const { pluginId, network, webhookHandler } = opts + + const [on, emit] = makeEvents() + + const logger: Logger = makeLogger('alchemy', pluginId) + + // Alchemy Notify API client + const notifyApi: AlchemyNotifyApi = makeAlchemyNotifyApi(logger) + + // Track subscribed addresses (normalized lowercase address -> original address) + const subscribedAddresses = new Map() + + // Webhook ID for this network (created on first subscription) + let webhookId: string | null = null + + // Pending address changes to batch + let pendingAddressesToAdd: string[] = [] + let pendingAddressesToRemove: string[] = [] + let batchTimeout: ReturnType | null = null + + // Batch delay in milliseconds (debounce address updates) + const BATCH_DELAY_MS = 1000 + + /** + * Handle incoming activity from the webhook handler + */ + function handleActivity( + _network: string, + activities: AlchemyActivity[] + ): void { + // Track which subscribed addresses have updates + const addressesToUpdate = new Set() + + for (const activity of activities) { + const normalizedFrom = activity.fromAddress?.toLowerCase() + const normalizedTo = activity.toAddress?.toLowerCase() + + // Check if fromAddress is subscribed + if (normalizedFrom != null) { + const originalFrom = subscribedAddresses.get(normalizedFrom) + if (originalFrom != null) { + addressesToUpdate.add(originalFrom) + } + } + + // Check if toAddress is subscribed + if (normalizedTo != null) { + const originalTo = subscribedAddresses.get(normalizedTo) + if (originalTo != null) { + addressesToUpdate.add(originalTo) + } + } + } + + // Emit update events for all affected subscribed addresses + for (const address of addressesToUpdate) { + // Use the block number from the first activity as checkpoint + const checkpoint = + activities.length > 0 + ? parseInt(activities[0].blockNum, 16).toString() + : undefined + emit('update', { address, checkpoint }) + } + } + + /** + * Process batched address changes + */ + async function processBatchedChanges(): Promise { + batchTimeout = null + + const toAdd = pendingAddressesToAdd + const toRemove = pendingAddressesToRemove + pendingAddressesToAdd = [] + pendingAddressesToRemove = [] + + if (toAdd.length === 0 && toRemove.length === 0) { + return + } + + try { + // Create webhook if it doesn't exist and we have addresses to add + if (webhookId == null && toAdd.length > 0) { + const webhookUrl = `${serverConfig.publicUri}/webhook/alchemy` + logger.info({ network, msg: 'Creating webhook' }) + + const response = await notifyApi.createWebhook({ + network, + webhookUrl, + addresses: toAdd + }) + + webhookId = response.data.id + logger.info({ webhookId, msg: 'Created webhook' }) + + // Clear toAdd since they were included in creation + toAdd.length = 0 + } + + // Update addresses if webhook exists + if (webhookId != null && (toAdd.length > 0 || toRemove.length > 0)) { + logger.info({ + added: toAdd.length, + removed: toRemove.length, + msg: 'Updating addresses' + }) + + await notifyApi.updateWebhookAddresses({ + webhookId, + addressesToAdd: toAdd.length > 0 ? toAdd : undefined, + addressesToRemove: toRemove.length > 0 ? 
toRemove : undefined + }) + } + } catch (error) { + logger.error({ err: error, msg: 'Failed to update webhook addresses' }) + + // Re-queue failed operations + pendingAddressesToAdd.push(...toAdd) + pendingAddressesToRemove.push(...toRemove) + + // Retry after delay + scheduleBatch() + } + } + + /** + * Schedule batch processing + */ + function scheduleBatch(): void { + if (batchTimeout == null) { + batchTimeout = setTimeout(() => { + processBatchedChanges().catch(error => { + logger.error({ err: error, msg: 'Batch processing error' }) + }) + }, BATCH_DELAY_MS) + } + } + + // Register this plugin's handler with the webhook handler + webhookHandler.registerNetworkHandler(network, handleActivity) + + const plugin: AddressPlugin = { + pluginId, + on, + + async subscribe(address: string): Promise { + const normalizedAddress = address.toLowerCase() + + // Check if already subscribed + if (subscribedAddresses.has(normalizedAddress)) { + return true + } + + // Track locally + subscribedAddresses.set(normalizedAddress, address) + + // Queue for batch update to Alchemy + pendingAddressesToAdd.push(address) + + // Remove from pending removals if present + const removeIndex = pendingAddressesToRemove.indexOf(address) + if (removeIndex !== -1) { + pendingAddressesToRemove.splice(removeIndex, 1) + } + + scheduleBatch() + + return true + }, + + async unsubscribe(address: string): Promise { + const normalizedAddress = address.toLowerCase() + + // Check if subscribed + if (!subscribedAddresses.has(normalizedAddress)) { + return false + } + + // Remove from local tracking + subscribedAddresses.delete(normalizedAddress) + + // Queue for batch update to Alchemy + pendingAddressesToRemove.push(address) + + // Remove from pending additions if present + const addIndex = pendingAddressesToAdd.indexOf(address) + if (addIndex !== -1) { + pendingAddressesToAdd.splice(addIndex, 1) + } + + scheduleBatch() + + return true + }, + + // Note: scanAddress is not implemented for Alchemy webhooks + // The webhook model pushes updates, so scanning is not needed. + // If needed in future, could use Alchemy Transfers API. 
+ + destroy() { + // Clear batch timeout + if (batchTimeout != null) { + clearTimeout(batchTimeout) + batchTimeout = null + } + + // Unregister from webhook handler + webhookHandler.unregisterNetworkHandler(network) + + // Clear local state + subscribedAddresses.clear() + pendingAddressesToAdd = [] + pendingAddressesToRemove = [] + + // Note: We don't delete the webhook on destroy + // It can be reused on restart, and addresses can be managed + } + } + + return plugin +} diff --git a/src/plugins/allPlugins.ts b/src/plugins/allPlugins.ts index 94c0651..f884053 100644 --- a/src/plugins/allPlugins.ts +++ b/src/plugins/allPlugins.ts @@ -1,373 +1,110 @@ -import { serverConfig } from '../serverConfig' import { AddressPlugin } from '../types/addressPlugin' -import { BlockbookOptions, makeBlockbook } from './blockbook' +import { AlchemyWebhookHandler } from '../util/alchemyWebhookHandler' +import { authenticateUrl } from '../util/authenticateUrl' +import { makeAlchemy } from './alchemy' +import { makeBlockbook } from './blockbook' import { makeEvmRpc } from './evmRpc' import { makeFakePlugin } from './fakePlugin' -function makeNowNode(opts: BlockbookOptions): AddressPlugin { - return makeBlockbook({ - ...opts, - nowNodesApiKey: serverConfig.nowNodesApiKey - }) -} +export function makeAllPlugins( + webhookHandler: AlchemyWebhookHandler +): AddressPlugin[] { + return [ + // Bitcoin family: + makeBlockbook({ + pluginId: 'bitcoin', + url: authenticateUrl('wss://btcbook.nownodes.io/wss/{{apiKey}}') + }), + makeBlockbook({ + pluginId: 'bitcoincash', + url: authenticateUrl('wss://bchbook.nownodes.io/wss/{{apiKey}}') + }), + makeBlockbook({ + pluginId: 'dogecoin', + url: authenticateUrl('wss://dogebook.nownodes.io/wss/{{apiKey}}') + }), + makeBlockbook({ + pluginId: 'litecoin', + url: authenticateUrl('wss://ltcbook.nownodes.io/wss/{{apiKey}}') + }), + makeBlockbook({ + pluginId: 'qtum', + url: authenticateUrl('wss://qtum-blockbook.nownodes.io/wss/{{apiKey}}') + }), -export const allPlugins = [ - // Bitcoin family: - makeNowNode({ - pluginId: 'bitcoin', - url: 'wss://btcbook.nownodes.io/wss/{nowNodesApiKey}' - }), - makeNowNode({ - pluginId: 'bitcoincash', - url: 'wss://bchbook.nownodes.io/wss/{nowNodesApiKey}' - }), - makeNowNode({ - pluginId: 'dogecoin', - url: 'wss://dogebook.nownodes.io/wss/{nowNodesApiKey}' - }), - makeNowNode({ - pluginId: 'litecoin', - url: 'wss://ltcbook.nownodes.io/wss/{nowNodesApiKey}' - }), - makeNowNode({ - pluginId: 'qtum', - url: 'wss://qtum-blockbook.nownodes.io/wss/{nowNodesApiKey}' - }), + // EVM chains using Alchemy Address Activity webhooks: + makeAlchemy({ + pluginId: 'ethereum', + network: 'ETH_MAINNET', + webhookHandler + }), + makeAlchemy({ + pluginId: 'polygon', + network: 'MATIC_MAINNET', + webhookHandler + }), + makeAlchemy({ + pluginId: 'optimism', + network: 'OPT_MAINNET', + webhookHandler + }), - // Ethereum family: - makeEvmRpc({ - pluginId: 'abstract', - urls: [ - 'https://abstract.api.onfinality.io/public' // yellow privacy - ], - scanAdapters: [ - { - type: 'etherscan-v2', - chainId: 2741, - urls: ['https://api.etherscan.io'] - } - ] - }), - makeEvmRpc({ - pluginId: 'amoy', - urls: [ - 'https://api.zan.top/polygon-amoy', // yellow privacy - 'https://polygon-amoy-public.nodies.app', // yellow privacy - 'https://polygon-amoy.api.onfinality.io/public' // yellow privacy - ] - }), - makeEvmRpc({ - pluginId: 'arbitrum', - urls: [ - 'https://arbitrum-one-rpc.publicnode.com', // green privacy - 'https://arbitrum.meowrpc.com', // green privacy - 
'https://public-arb-mainnet.fastnode.io', // green privacy - 'https://api.zan.top/arb-one', // yellow privacy - 'https://arbitrum-one-public.nodies.app', // yellow privacy - 'https://arbitrum.api.onfinality.io/public', // yellow privacy - 'https://rpc.poolz.finance/arbitrum' // yellow privacy - ], - scanAdapters: [ - { - type: 'etherscan-v2', - chainId: 42161, - urls: ['https://api.etherscan.io'] - } - ] - }), - makeEvmRpc({ - pluginId: 'avalanche', - urls: [ - 'https://0xrpc.io/avax', // green privacy - 'https://avalanche-c-chain-rpc.publicnode.com', // green privacy - 'https://avax.meowrpc.com', // green privacy - 'https://endpoints.omniatech.io/v1/avax/mainnet/public', // green privacy - 'https://api.zan.top/avax-mainnet/ext/bc/C/rpc', // yellow privacy - 'https://avalanche-public.nodies.app/ext/bc/C/rpc', // yellow privacy - 'https://avalanche.api.onfinality.io/public/ext/bc/C/rpc', // yellow privacy - 'https://rpc.poolz.finance/avalanche' // yellow privacy - ], - scanAdapters: [ - { - type: 'etherscan-v2', - chainId: 43114, - urls: ['https://api.etherscan.io'] - } - ] - }), - makeEvmRpc({ - pluginId: 'base', - urls: [ - 'https://base-rpc.publicnode.com', // green privacy - 'https://base.llamarpc.com', // green privacy - 'https://base.meowrpc.com', // green privacy - 'https://api.zan.top/base-mainnet', // yellow privacy - 'https://base-public.nodies.app', // yellow privacy - 'https://base.api.onfinality.io/public', // yellow privacy - 'https://rpc.poolz.finance/base' // yellow privacy - ], - scanAdapters: [ - { - type: 'etherscan-v2', - chainId: 8453, - urls: ['https://api.etherscan.io'] - } - ] - }), - makeEvmRpc({ - pluginId: 'binancesmartchain', - urls: [ - 'https://binance.llamarpc.com', // green privacy - 'https://bsc-rpc.publicnode.com', // green privacy - 'https://bsc.blockrazor.xyz', // green privacy - 'https://bsc.meowrpc.com', // green privacy - 'https://endpoints.omniatech.io/v1/bsc/mainnet/public', // green privacy - 'https://public-bsc-mainnet.fastnode.io', // green privacy - 'https://0.48.club', // yellow privacy - 'https://api-bsc-mainnet-full.n.dwellir.com/2ccf18bf-2916-4198-8856-42172854353c', // yellow privacy - 'https://api.zan.top/bsc-mainnet', // yellow privacy - 'https://binance-smart-chain-public.nodies.app', // yellow privacy - 'https://bnb.api.onfinality.io/public', // yellow privacy - 'https://go.getblock.io/cc778cdbdf5c4b028ec9456e0e6c0cf3', // yellow privacy - 'https://rpc-bsc.48.club', // yellow privacy - 'https://rpc.poolz.finance/bsc' // yellow privacy - ], - scanAdapters: [ - { - type: 'etherscan-v2', - chainId: 56, - urls: ['https://api.etherscan.io'] - } - ] - }), - makeEvmRpc({ - pluginId: 'bobevm', - urls: ['https://rpc.gobob.xyz'], // original URL - all chainlist RPCs failed - scanAdapters: [ - { type: 'etherscan-v1', urls: ['https://explorer.gobob.xyz'] } - ] - }), - makeEvmRpc({ - pluginId: 'botanix', - urls: ['https://rpc.ankr.com/botanix_mainnet'], // green privacy - scanAdapters: [ - { - type: 'etherscan-v1', - urls: ['https://api.routescan.io/v2/network/mainnet/evm/3637/etherscan'] - } - ] - }), - makeEvmRpc({ - pluginId: 'celo', - urls: [ - 'https://celo-json-rpc.stakely.io', // green privacy - 'https://celo.api.onfinality.io/public', // yellow privacy - 'https://rpc.ankr.com/celo' // yellow privacy - ], - scanAdapters: [ - { type: 'etherscan-v1', urls: ['https://explorer.celo.org/mainnet'] }, - { - type: 'etherscan-v2', - chainId: 42220, - urls: ['https://api.etherscan.io'] - } - ] - }), - makeEvmRpc({ - pluginId: 'ethereum', - urls: [ - 
'https://0xrpc.io/eth', // green privacy - 'https://endpoints.omniatech.io/v1/eth/mainnet/public', // green privacy - 'https://eth.blockrazor.xyz', // green privacy - 'https://eth.llamarpc.com', // green privacy - 'https://eth.meowrpc.com', // green privacy - 'https://eth.merkle.io', // green privacy - 'https://ethereum-json-rpc.stakely.io', // green privacy - 'https://ethereum-rpc.publicnode.com', // green privacy - 'https://go.getblock.io/aefd01aa907c4805ba3c00a9e5b48c6b', // green privacy - 'https://rpc.flashbots.net', // green privacy - 'https://rpc.mevblocker.io', // green privacy - 'https://rpc.payload.de', // green privacy - 'https://api.zan.top/eth-mainnet', // yellow privacy - 'https://eth.api.onfinality.io/public', // yellow privacy - 'https://ethereum-public.nodies.app', // yellow privacy - 'https://rpc.poolz.finance/eth' // yellow privacy - ], - scanAdapters: [ - { - type: 'etherscan-v2', - chainId: 1, - urls: ['https://api.etherscan.io'] - } - ] - }), - makeEvmRpc({ - pluginId: 'ethereumclassic', - urls: [ - 'https://0xrpc.io/etc', // green privacy - 'https://etc.rivet.link', // green privacy - 'https://ethereum-classic-mainnet.gateway.tatum.io' // green privacy - ], - scanAdapters: [ - { type: 'etherscan-v1', urls: ['https://etc.blockscout.com'] } - ] - }), - makeEvmRpc({ - pluginId: 'ethereumpow', - urls: ['https://mainnet.ethereumpow.org'] // no chainlist RPCs found, keeping original - }), - makeEvmRpc({ - pluginId: 'fantom', - urls: [ - 'https://endpoints.omniatech.io/v1/fantom/mainnet/public', // green privacy - 'https://fantom-json-rpc.stakely.io', // green privacy - 'https://api.zan.top/ftm-mainnet', // yellow privacy - 'https://fantom-public.nodies.app', // yellow privacy - 'https://fantom.api.onfinality.io/public' // yellow privacy - ], - scanAdapters: [{ type: 'etherscan-v1', urls: ['https://ftmscout.com/'] }] - }), - makeEvmRpc({ - pluginId: 'filecoinfevm', - urls: [ - 'https://filecoin.chainup.net/rpc/v1', // yellow privacy - 'https://rpc.ankr.com/filecoin' // yellow privacy - ] - }), - makeEvmRpc({ - pluginId: 'filecoinfevmcalibration', - urls: ['https://rpc.ankr.com/filecoin_testnet'] // original URL - all chainlist RPCs failed - }), - makeEvmRpc({ - pluginId: 'holesky', - urls: ['https://ethereum-holesky-rpc.publicnode.com'], // original URL - all chainlist RPCs failed - scanAdapters: [ - { - type: 'etherscan-v2', - chainId: 17000, - urls: ['https://api.etherscan.io'] - } - ] - }), - makeEvmRpc({ - pluginId: 'hyperevm', - urls: ['https://rpc.hyperliquid.xyz/evm'], // no chainlist RPCs found, keeping original - scanAdapters: [ - { - type: 'etherscan-v1', - urls: ['https://api.routescan.io/v2/network/mainnet/evm/999/etherscan'] - } - ] - }), - makeEvmRpc({ - pluginId: 'optimism', - urls: [ - 'https://0xrpc.io/op', // green privacy - 'https://endpoints.omniatech.io/v1/op/mainnet/public', // green privacy - 'https://optimism-rpc.publicnode.com', // green privacy - 'https://public-op-mainnet.fastnode.io', // green privacy - 'https://api.zan.top/opt-mainnet', // yellow privacy - 'https://optimism-public.nodies.app', // yellow privacy - 'https://optimism.api.onfinality.io/public' // yellow privacy - ], - scanAdapters: [ - { - type: 'etherscan-v2', - chainId: 10, - urls: ['https://api.etherscan.io'] - } - ] - }), - makeEvmRpc({ - pluginId: 'polygon', - urls: [ - 'https://endpoints.omniatech.io/v1/matic/mainnet/public', // green privacy - 'https://polygon-bor-rpc.publicnode.com', // green privacy - 'https://polygon.meowrpc.com', // green privacy - 
'https://api.zan.top/polygon-mainnet', // yellow privacy - 'https://polygon-public.nodies.app', // yellow privacy - 'https://polygon.api.onfinality.io/public', // yellow privacy - 'https://rpc.poolz.finance/polygon' // yellow privacy - ], - scanAdapters: [ - { - type: 'etherscan-v2', - chainId: 137, - urls: ['https://api.etherscan.io'] - } - ] - }), - makeEvmRpc({ - pluginId: 'pulsechain', - urls: [ - 'https://pulsechain-rpc.publicnode.com', // green privacy - 'https://rpc.pulsechainrpc.com', // green privacy - 'https://rpc.pulsechainstats.com' // yellow privacy - ], - scanAdapters: [ - { type: 'etherscan-v1', urls: ['https://api.scan.pulsechain.com'] } - ] - }), - makeEvmRpc({ - pluginId: 'rsk', - urls: ['https://public-node.rsk.co'], // original URL - all chainlist RPCs failed - scanAdapters: [ - { type: 'etherscan-v1', urls: ['https://rootstock.blockscout.com/'] } - ] - }), - makeEvmRpc({ - pluginId: 'sepolia', - urls: [ - 'https://0xrpc.io/sep', // green privacy - 'https://ethereum-sepolia-rpc.publicnode.com', // green privacy - 'https://api.zan.top/eth-sepolia', // yellow privacy - 'https://eth-sepolia.api.onfinality.io/public', // yellow privacy - 'https://ethereum-sepolia-public.nodies.app' // yellow privacy - ], - scanAdapters: [ - { - type: 'etherscan-v2', - chainId: 11155111, - urls: ['https://api.etherscan.io'] - } - ] - }), - makeEvmRpc({ - pluginId: 'sonic', - urls: ['https://sonic-json-rpc.stakely.io'], // green privacy - scanAdapters: [ - { - type: 'etherscan-v2', - chainId: 146, - urls: ['https://api.etherscan.io'] - } - ] - }), - makeEvmRpc({ - pluginId: 'zksync', - urls: [ - 'https://rpc.ankr.com/zksync_era', // green privacy - 'https://zksync.meowrpc.com', // green privacy - 'https://api.zan.top/zksync-mainnet', // yellow privacy - 'https://go.getblock.io/f76c09905def4618a34946bf71851542', // yellow privacy - 'https://zksync.api.onfinality.io/public' // yellow privacy - ], - scanAdapters: [ - { - type: 'etherscan-v1', - urls: [ - 'https://block-explorer-api.mainnet.zksync.io', - 'https://zksync.blockscout.com' - ] - }, - { - type: 'etherscan-v2', - chainId: 324, - urls: ['https://api.etherscan.io'] - } - ] - }), + // EVM chains using RPC polling (not yet supported by Alchemy webhooks): + makeEvmRpc({ + pluginId: 'binancesmartchain', + urls: [ + authenticateUrl('wss://lb.drpc.org/ogrpc?network=bsc&dkey={{apiKey}}'), + authenticateUrl('https://lb.drpc.org/ogrpc?network=bsc&dkey={{apiKey}}') + ], + scanAdapters: [ + { + type: 'etherscan-v2', + chainId: 56, + urls: ['https://api.etherscan.io'] + } + ] + }), + makeEvmRpc({ + pluginId: 'botanix', + urls: [ + 'wss://rpc.ankr.com/botanix_mainnet', + 'https://rpc.ankr.com/botanix_mainnet' + ], + scanAdapters: [ + { + type: 'etherscan-v1', + urls: [ + 'https://api.routescan.io/v2/network/mainnet/evm/3637/etherscan' + ] + } + ] + }), + makeEvmRpc({ + pluginId: 'zksync', + urls: [ + authenticateUrl( + 'wss://lb.drpc.org/ogrpc?network=zksync&dkey={{apiKey}}' + ), + authenticateUrl( + 'https://lb.drpc.org/ogrpc?network=zksync&dkey={{apiKey}}' + ) + ], + scanAdapters: [ + { + type: 'etherscan-v2', + chainId: 324, + urls: ['https://api.etherscan.io'] + }, + { + type: 'etherscan-v1', + urls: [ + 'https://api.routescan.io/v2/network/mainnet/evm/324/etherscan' + ] + } + ] + }), - // Testing: - makeFakePlugin() -] + // Testing: + makeFakePlugin() + ] +} diff --git a/src/plugins/blockbook.ts b/src/plugins/blockbook.ts index a4b18f7..008be90 100644 --- a/src/plugins/blockbook.ts +++ b/src/plugins/blockbook.ts @@ -9,6 +9,8 @@ import { 
blockbookProtocol, BlockbookProtocolServer } from '../types/blockbookProtocol' +import { getAddressPrefix } from '../util/addressUtils' +import { makeLogger } from '../util/logger' import { snooze } from '../util/snooze' const MAX_ADDRESS_COUNT_PER_CONNECTION = 100 @@ -34,9 +36,6 @@ export interface BlockbookOptions { /** The actual connection URL */ url: string - - /** Optional API key to replace {nowNodesApiKey} template in URL */ - nowNodesApiKey?: string } interface Connection { @@ -47,18 +46,10 @@ interface Connection { } export function makeBlockbook(opts: BlockbookOptions): AddressPlugin { - const { pluginId, url, nowNodesApiKey } = opts + const { pluginId, url } = opts const [on, emit] = makeEvents() - // Replace template with actual API key for connection URL - const connectionUrl = - nowNodesApiKey != null - ? url.replace('{nowNodesApiKey}', nowNodesApiKey) - : url - // Use original URL (with template) for logging - no sanitization needed - const logUrl = url - const addressToConnection = new Map() const connections: Connection[] = [] // Map of address to unconfirmed txids for tracking mempool transactions @@ -90,21 +81,10 @@ export function makeBlockbook(opts: BlockbookOptions): AddressPlugin { } })() - const logPrefix = `${pluginId} (${logUrl}):` - const logger = { - log: (...args: unknown[]): void => { - console.log(logPrefix, ...args) - }, - error: (...args: unknown[]): void => { - console.error(logPrefix, ...args) - }, - warn: (...args: unknown[]): void => { - console.warn(logPrefix, ...args) - } - } + const logger = makeLogger('blockbook', pluginId) function makeConnection(): Connection { - const ws = new WebSocket(connectionUrl) + const ws = new WebSocket(url) const codec = blockbookProtocol.makeClientCodec({ handleError, async handleSend(text) { @@ -121,12 +101,12 @@ export function makeBlockbook(opts: BlockbookOptions): AddressPlugin { }) const socketReady = new Promise(resolve => { ws.on('open', () => { - pluginConnectionCounter.inc({ pluginId, url: logUrl }) + pluginConnectionCounter.inc({ pluginId, url: url }) resolve() }) }) ws.on('close', () => { - pluginDisconnectionCounter.inc({ pluginId, url: logUrl }) + pluginDisconnectionCounter.inc({ pluginId, url: url }) if (connection === blockConnection) { // If this was the block connection, re-init it (unless destroyed). 
@@ -135,7 +115,7 @@ export function makeBlockbook(opts: BlockbookOptions): AddressPlugin { snooze(getBlockConnectionReconnectDelay()) .then(() => initBlockConnection()) .catch(err => { - console.error('Failed to re-initialize block connection:', err) + logger.error({ err }, 'Failed to re-initialize block connection') }) } } else { @@ -174,7 +154,7 @@ export function makeBlockbook(opts: BlockbookOptions): AddressPlugin { .subscribeNewBlock(undefined) .then(result => { if (result.subscribed) { - logger.log('Block connection initialized') + logger.info('Block connection initialized') } else { logger.error('Failed to subscribe to new blocks') } @@ -221,9 +201,9 @@ export function makeBlockbook(opts: BlockbookOptions): AddressPlugin { function handleError(error: unknown): void { // Log to Prometheus: - pluginErrorCounter.inc({ pluginId, url: logUrl }) + pluginErrorCounter.inc({ pluginId, url: url }) - logger.warn('WebSocket error:', error) + logger.warn(`WebSocket error: ${String(error)}`) } function subscribeAddresses({ address, @@ -231,6 +211,11 @@ export function makeBlockbook(opts: BlockbookOptions): AddressPlugin { }: Parameters< BlockbookProtocolServer['remoteMethods']['subscribeAddresses'] >[0]): void { + logger.info({ + addr: getAddressPrefix(address), + txid: getAddressPrefix(tx.txid), + msg: 'tx detected' + }) // Add the tx hash to a list of unconfirmed transactions watchUnconfirmedTx(address, tx.txid) emit('update', { address }) @@ -241,6 +226,7 @@ export function makeBlockbook(opts: BlockbookOptions): AddressPlugin { }: Parameters< BlockbookProtocolServer['remoteMethods']['subscribeNewBlock'] >[0]): void { + logger.info({ blockNum: height.toString(), msg: 'block' }) // Check unconfirmed transactions and update clients for (const [ address, @@ -304,7 +290,7 @@ export function makeBlockbook(opts: BlockbookOptions): AddressPlugin { await connection.codec.remoteMethods.ping(undefined) }) .catch(error => { - logger.error('ping error:', error) + logger.error({ err: error }, 'ping error') }) } @@ -315,7 +301,7 @@ export function makeBlockbook(opts: BlockbookOptions): AddressPlugin { await blockConnection?.codec.remoteMethods.ping(undefined) }) .catch(error => { - logger.error('block connection ping error:', error) + logger.error({ err: error }, 'block connection ping error') }) } }, 50000) diff --git a/src/plugins/evmRpc.ts b/src/plugins/evmRpc.ts index c372a65..5ec6425 100644 --- a/src/plugins/evmRpc.ts +++ b/src/plugins/evmRpc.ts @@ -1,9 +1,18 @@ -import { createPublicClient, fallback, http, parseAbiItem } from 'viem' +import { + createPublicClient, + fallback, + http, + HttpTransport, + parseAbiItem, + webSocket, + WebSocketTransport +} from 'viem' import { mainnet } from 'viem/chains' import { makeEvents } from 'yavent' -import { Logger } from '../types' import { AddressPlugin, PluginEvents } from '../types/addressPlugin' +import { getAddressPrefix } from '../util/addressUtils' +import { Logger, makeLogger } from '../util/logger' import { pickRandom } from '../util/pickRandom' import { makeEtherscanV1ScanAdapter } from '../util/scanAdapters/EtherscanV1ScanAdapter' import { makeEtherscanV2ScanAdapter } from '../util/scanAdapters/EtherscanV2ScanAdapter' @@ -19,7 +28,7 @@ export interface EvmRpcOptions { urls: string[] /** The scan adapters to use for this plugin.
*/ - scanAdapters?: ScanAdapterConfig[] + scanAdapters: ScanAdapterConfig[] /** Enable value-carrying internal transfer detection via traces (default `true`) */ includeInternal?: boolean @@ -34,82 +43,36 @@ export function makeEvmRpc(opts: EvmRpcOptions): AddressPlugin { const [on, emit] = makeEvents() - // Track which URL is currently being used by the fallback transport - let activeUrl: string = pickRandom(urls) - - // Create a logger that uses the active URL (sanitized for logging) - const getLogPrefix = (): string => - `${pluginId} (${sanitizeUrlForLogging(activeUrl)}):` - const logger: Logger = { - log: (...args: unknown[]): void => { - console.log(getLogPrefix(), ...args) - }, - error: (...args: unknown[]): void => { - console.error(getLogPrefix(), ...args) - }, - warn: (...args: unknown[]): void => { - console.warn(getLogPrefix(), ...args) - } - } + const logger = makeLogger('evmRpc', pluginId) // Track subscribed addresses (normalized lowercase address -> original address) const subscribedAddresses = new Map() - // Create a map to track which URL corresponds to which transport instance. - // Using WeakMap so transport instances can be garbage collected when no longer used. - const transportUrlMap = new WeakMap() - // Create fallback transport with all URLs - const transports = urls.map(url => { - const httpTransport = http(url) - // Wrap the transport factory to track URL mapping - return (config: any) => { - const transportInstance = httpTransport(config) - // Store the mapping from transport instance to URL - transportUrlMap.set(transportInstance, url) - return transportInstance - } - }) - const transport = fallback(transports) + const transport = fallback(urls.map(url => createTransport(url))) const client = createPublicClient({ chain: mainnet, transport }) - // Access the transport's onResponse callback after client creation to track which URL was used - // The transport is created internally, so we need to access it through the client's transport - const fallbackTransport = client.transport as any - if (fallbackTransport?.onResponse != null) { - fallbackTransport.onResponse( - ({ - transport: usedTransport, - status - }: { - transport: any - status: 'success' | 'error' - }) => { - // When a transport succeeds, update the active URL - if (status === 'success' && usedTransport != null) { - const url = transportUrlMap.get(usedTransport) - if (url != null) { - activeUrl = url - } - } - } - ) - } - const unwatchBlocks = client.watchBlocks({ includeTransactions: true, emitMissed: true, - onError: error => { - logger.error('watchBlocks error', error) + onError: err => { + logger.error({ err }, 'watchBlocks error') }, onBlock: async block => { - logger.log('onBlock', block.number) + logger.info({ + blockNum: block.number.toString(), + msg: 'block', + numSubs: subscribedAddresses.size + }) + // Skip processing if no subscriptions - if (subscribedAddresses.size === 0) return + if (subscribedAddresses.size === 0) { + return + } // Track which subscribed addresses have updates in this block const addressesToUpdate = new Set() @@ -134,10 +97,19 @@ export function makeEvmRpc(opts: EvmRpcOptions): AddressPlugin { }) // Check ERC20 transfers - const transferLogs = await client.getLogs({ - blockHash: block.hash, - event: ERC20_TRANSFER_EVENT - }) + const transferLogs = await client + .getLogs({ + blockHash: block.hash, + event: ERC20_TRANSFER_EVENT + }) + .catch(error => { + logger.error({ + err: error, + blockNum: block.number.toString(), + msg: 'getLogs error' + }) + throw error + }) 
transferLogs.forEach(log => { const normalizedFromAddress = log.args.from?.toLowerCase() const normalizedToAddress = log.args.to?.toLowerCase() @@ -157,6 +129,7 @@ export function makeEvmRpc(opts: EvmRpcOptions): AddressPlugin { } }) + let traceBlock = true // Internal native-value transfers via traces if (opts.includeInternal !== false) { const addressesFromTraces = new Set() @@ -181,6 +154,7 @@ export function makeEvmRpc(opts: EvmRpcOptions): AddressPlugin { } } catch (e) { // fall through to geth debug_traceTransaction + traceBlock = false } if (!traced) { @@ -199,7 +173,10 @@ export function makeEvmRpc(opts: EvmRpcOptions): AddressPlugin { ] }) ) - ) + ).catch(error => { + logger.error({ err: error }, 'debug_traceTransaction error') + throw error + }) const walk = (node: any): void => { if (node == null) return @@ -225,11 +202,23 @@ export function makeEvmRpc(opts: EvmRpcOptions): AddressPlugin { // Emit update events for all affected subscribed addresses for (const originalAddress of addressesToUpdate) { + logger.info({ + addr: getAddressPrefix(originalAddress), + msg: 'tx detected' + }) emit('update', { address: originalAddress, checkpoint: block.number.toString() }) } + logger.info({ + blockNum: block.number.toString(), + msg: 'block processed', + internal: opts.includeInternal !== false, + traceBlock, + numSubs: subscribedAddresses.size, + numUpdates: addressesToUpdate.size + }) } }) @@ -246,12 +235,13 @@ export function makeEvmRpc(opts: EvmRpcOptions): AddressPlugin { }, on, scanAddress: async (address, checkpoint): Promise => { - // if no adapters are provided, then we have no way to implement - // scanAddress. - if (scanAdapters == null || scanAdapters.length === 0) { + const scanAdapter = pickRandom(scanAdapters) + if (scanAdapter == null) { + // If no adapters are provided, then we have no way to implement + // scanAddress. + logger.error({ msg: 'No scan adapters provided', pluginId }) return true } - const scanAdapter = pickRandom(scanAdapters) const adapter = getScanAdapter(scanAdapter, logger) return await adapter(address, checkpoint) }, @@ -264,6 +254,20 @@ export function makeEvmRpc(opts: EvmRpcOptions): AddressPlugin { return plugin } +function createTransport(url: string): HttpTransport | WebSocketTransport { + const protocol = new URL(url).protocol + switch (protocol) { + case 'ws:': + case 'wss:': + return webSocket(url) + case 'http:': + case 'https:': + return http(url) + default: + throw new Error(`Unsupported URL protocol: ${protocol}`) + } +} + function getScanAdapter( scanAdapterConfig: ScanAdapterConfig, logger: Logger @@ -275,15 +279,3 @@ function getScanAdapter( return makeEtherscanV2ScanAdapter(scanAdapterConfig, logger) } } - -/** - * Sanitizes a URL for safe logging by removing sensitive information like API keys. - * TODO: Implement URL sanitization once API keys are used for RPC URLs. 
- * - * @param url - The URL to sanitize - * @returns A sanitized URL safe for logging - */ -function sanitizeUrlForLogging(url: string): string { - // TODO: We'll clean URLs once API keys are used for RPC URLs - return url -} diff --git a/src/serverConfig.ts b/src/serverConfig.ts index 9ef5f6f..c5c5e88 100644 --- a/src/serverConfig.ts +++ b/src/serverConfig.ts @@ -1,7 +1,9 @@ import { makeConfig } from 'cleaner-config' -import { asArray, asNumber, asObject, asOptional, asString } from 'cleaners' +import { asNumber, asObject, asOptional, asString } from 'cleaners' import { cpus } from 'os' +import { asServiceKeys } from './util/serviceKeys' + /** * Configures the server process as a whole, * such as where to listen and how to talk to the database. @@ -15,16 +17,17 @@ const asServerConfig = asObject({ listenPort: asOptional(asNumber, 8008), metricsHost: asOptional(asString, '127.0.0.1'), metricsPort: asOptional(asNumber, 8009), + webhookHost: asOptional(asString, '127.0.0.1'), + webhookPort: asOptional(asNumber, 8010), publicUri: asOptional(asString, 'https://address1.edge.app'), + // Alchemy webhook: + alchemyWebhookSigningKey: asOptional(asString, ''), + // Resources: - nowNodesApiKey: asOptional(asString, ''), - serviceKeys: asOptional( - asObject(asArray(asString)), - () => ({ - '': [''] - }) - ) + serviceKeys: asOptional(asServiceKeys, () => ({ + '': [''] + })) }) export const serverConfig = makeConfig( diff --git a/src/types.ts b/src/types.ts deleted file mode 100644 index 6366c32..0000000 --- a/src/types.ts +++ /dev/null @@ -1,5 +0,0 @@ -export interface Logger { - log: (...args: unknown[]) => void - error: (...args: unknown[]) => void - warn: (...args: unknown[]) => void -} diff --git a/src/util/addressUtils.ts b/src/util/addressUtils.ts new file mode 100644 index 0000000..2ca5ba1 --- /dev/null +++ b/src/util/addressUtils.ts @@ -0,0 +1,3 @@ +export function getAddressPrefix(address: string): string { + return address.slice(0, 6) +} diff --git a/src/util/alchemyNotifyApi.ts b/src/util/alchemyNotifyApi.ts new file mode 100644 index 0000000..ce6086c --- /dev/null +++ b/src/util/alchemyNotifyApi.ts @@ -0,0 +1,177 @@ +import fetch from 'node-fetch' + +import { serverConfig } from '../serverConfig' +import { Logger } from './logger' +import { pickRandom } from './pickRandom' +import { serviceKeysFromUrl } from './serviceKeys' + +// Alchemy network identifiers +export type AlchemyNetwork = + | 'ETH_MAINNET' + | 'ETH_SEPOLIA' + | 'ETH_HOLESKY' + | 'MATIC_MAINNET' + | 'MATIC_AMOY' + | 'ARB_MAINNET' + | 'ARB_SEPOLIA' + | 'OPT_MAINNET' + | 'OPT_SEPOLIA' + | 'BASE_MAINNET' + | 'BASE_SEPOLIA' + +const ALCHEMY_API_BASE = 'https://dashboard.alchemy.com/api' + +interface CreateWebhookParams { + network: AlchemyNetwork + webhookUrl: string + addresses?: string[] +} + +interface CreateWebhookResponse { + data: { + id: string + network: string + webhook_type: string + webhook_url: string + is_active: boolean + time_created: number + signing_key: string + version: string + app_id?: string + } +} + +interface UpdateWebhookAddressesParams { + webhookId: string + addressesToAdd?: string[] + addressesToRemove?: string[] +} + +interface GetWebhookAddressesParams { + webhookId: string + limit?: number + after?: string +} + +interface GetWebhookAddressesResponse { + data: string[] + pagination: { + cursors: { + after?: string + } + total_count: number + } +} + +export interface AlchemyNotifyApi { + createWebhook: (params: CreateWebhookParams) => Promise + updateWebhookAddresses: ( + params: 
UpdateWebhookAddressesParams + ) => Promise + getWebhookAddresses: ( + params: GetWebhookAddressesParams + ) => Promise + deleteWebhook: (webhookId: string) => Promise +} + +export function makeAlchemyNotifyApi(logger: Logger): AlchemyNotifyApi { + function getAuthToken(): string { + const apiKeys = serviceKeysFromUrl( + serverConfig.serviceKeys, + 'https://dashboard.alchemy.com' + ) + const apiKey = pickRandom(apiKeys) + if (apiKey == null || apiKey === '') { + throw new Error( + 'Missing Alchemy Auth Token in serviceKeys for dashboard.alchemy.com' + ) + } + return apiKey + } + + async function apiRequest( + endpoint: string, + method: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE', + body?: object + ): Promise { + const authToken = getAuthToken() + const url = `${ALCHEMY_API_BASE}${endpoint}` + + logger.info({ method, endpoint, msg: 'Alchemy API request' }) + + const response = await fetch(url, { + method, + headers: { + 'Content-Type': 'application/json', + 'X-Alchemy-Token': authToken + }, + body: body != null ? JSON.stringify(body) : undefined + }) + + if (!response.ok) { + const errorText = await response.text() + throw new Error( + `Alchemy API error: ${response.status} ${response.statusText} - ${errorText}` + ) + } + + // Some endpoints return empty body (204) + if (response.status === 204) { + return (undefined as unknown) as T + } + + return (await response.json()) as T + } + + return { + async createWebhook( + params: CreateWebhookParams + ): Promise { + return await apiRequest( + '/create-webhook', + 'POST', + { + network: params.network, + webhook_type: 'ADDRESS_ACTIVITY', + webhook_url: params.webhookUrl, + addresses: params.addresses ?? [] + } + ) + }, + + async updateWebhookAddresses( + params: UpdateWebhookAddressesParams + ): Promise { + await apiRequest('/update-webhook-addresses', 'PATCH', { + webhook_id: params.webhookId, + addresses_to_add: params.addressesToAdd ?? [], + addresses_to_remove: params.addressesToRemove ?? 
[] + }) + }, + + async getWebhookAddresses( + params: GetWebhookAddressesParams + ): Promise { + const queryParams = new URLSearchParams({ + webhook_id: params.webhookId + }) + if (params.limit != null) { + queryParams.set('limit', params.limit.toString()) + } + if (params.after != null) { + queryParams.set('after', params.after) + } + + return await apiRequest( + `/webhook-addresses?${queryParams.toString()}`, + 'GET' + ) + }, + + async deleteWebhook(webhookId: string): Promise { + await apiRequest('/delete-webhook', 'DELETE', { + webhook_id: webhookId + }) + } + } +} diff --git a/src/util/alchemyWebhookHandler.ts b/src/util/alchemyWebhookHandler.ts new file mode 100644 index 0000000..5143e1e --- /dev/null +++ b/src/util/alchemyWebhookHandler.ts @@ -0,0 +1,242 @@ +import crypto from 'crypto' +import http from 'http' + +import { serverConfig } from '../serverConfig' +import { Logger, makeLogger } from './logger' + +/** + * Activity event from Alchemy Address Activity webhook + */ +export interface AlchemyActivity { + blockNum: string + hash: string + fromAddress: string + toAddress: string + value: number + erc721TokenId: string | null + erc1155Metadata: Array<{ tokenId: string; value: string }> | null + asset: string + category: 'external' | 'internal' | 'erc20' | 'erc721' | 'erc1155' | 'token' + rawContract: { + rawValue: string + address: string + decimals: number + } + typeTraceAddress: string | null + log?: { + address: string + topics: string[] + data: string + blockNumber: string + transactionHash: string + transactionIndex: string + blockHash: string + logIndex: string + removed: boolean + } +} + +/** + * Webhook payload from Alchemy Address Activity webhook + */ +export interface AlchemyWebhookPayload { + webhookId: string + id: string + createdAt: string + type: 'ADDRESS_ACTIVITY' + event: { + network: string + activity: AlchemyActivity[] + } +} + +export type WebhookActivityHandler = ( + network: string, + activity: AlchemyActivity[] +) => void + +export interface AlchemyWebhookHandler { + /** Register a handler for a specific network */ + registerNetworkHandler: ( + network: string, + handler: WebhookActivityHandler + ) => void + + /** Unregister a handler for a specific network */ + unregisterNetworkHandler: (network: string) => void + + /** Start the HTTP server to receive webhooks */ + start: () => void + + /** Stop the HTTP server */ + stop: () => void +} + +/** + * Creates an HTTP server that receives Alchemy webhook callbacks, + * validates signatures, and routes activity events to registered handlers. 
+ */ +export function makeAlchemyWebhookHandler(): AlchemyWebhookHandler { + const logger: Logger = makeLogger('alchemy-webhook') + const networkHandlers = new Map() + let server: http.Server | null = null + + /** + * Validates the webhook signature using HMAC-SHA256 + */ + function validateSignature(rawBody: string, signature: string): boolean { + const signingKey = serverConfig.alchemyWebhookSigningKey + if (signingKey === '') { + logger.error({ msg: 'Missing alchemyWebhookSigningKey in config' }) + return false + } + + const hmac = crypto.createHmac('sha256', signingKey) + hmac.update(rawBody, 'utf8') + const expectedSignature = hmac.digest('hex') + + // Use timing-safe comparison to prevent timing attacks + try { + return crypto.timingSafeEqual( + Buffer.from(signature), + Buffer.from(expectedSignature) + ) + } catch { + // Lengths don't match + return false + } + } + + /** + * Handle incoming HTTP requests + */ + function handleRequest( + req: http.IncomingMessage, + res: http.ServerResponse + ): void { + // Only accept POST requests to /webhook + if (req.method !== 'POST') { + res.writeHead(405, { 'Content-Type': 'text/plain' }) + res.end('Method Not Allowed') + return + } + + const host = req.headers.host + if (host == null) { + res.writeHead(400, { 'Content-Type': 'text/plain' }) + res.end('Missing host') + return + } + const url = new URL(req.url ?? '/', `http://${host}`) + if (url.pathname !== '/webhook' && url.pathname !== '/webhook/alchemy') { + res.writeHead(404, { 'Content-Type': 'text/plain' }) + res.end('Not Found') + return + } + + // Collect request body + const chunks: Buffer[] = [] + req.on('data', (chunk: Buffer) => { + chunks.push(chunk) + }) + + req.on('end', () => { + const rawBody = Buffer.concat(chunks).toString('utf8') + + // Validate signature + const signature = req.headers['x-alchemy-signature'] + if (typeof signature !== 'string') { + logger.warn({ msg: 'Missing X-Alchemy-Signature header' }) + res.writeHead(401, { 'Content-Type': 'text/plain' }) + res.end('Missing signature') + return + } + + if (!validateSignature(rawBody, signature)) { + logger.warn({ msg: 'Invalid webhook signature' }) + res.writeHead(401, { 'Content-Type': 'text/plain' }) + res.end('Invalid signature') + return + } + + // Parse payload + let payload: AlchemyWebhookPayload + try { + payload = JSON.parse(rawBody) as AlchemyWebhookPayload + } catch (error) { + logger.error({ err: error, msg: 'Failed to parse webhook payload' }) + res.writeHead(400, { 'Content-Type': 'text/plain' }) + res.end('Invalid JSON') + return + } + + // Validate payload type + if (payload.type !== 'ADDRESS_ACTIVITY') { + logger.warn({ type: payload.type, msg: 'Unexpected webhook type' }) + res.writeHead(200, { 'Content-Type': 'text/plain' }) + res.end('OK') + return + } + + // Route to network handler + const network = payload.event.network + const handler = networkHandlers.get(network) + + if (handler != null) { + try { + handler(network, payload.event.activity) + } catch (error) { + logger.error({ err: error, network, msg: 'Error in network handler' }) + } + } else { + logger.warn({ network, msg: 'No handler registered for network' }) + } + + // Always respond 200 to acknowledge receipt + res.writeHead(200, { 'Content-Type': 'text/plain' }) + res.end('OK') + }) + + req.on('error', (error: Error) => { + logger.error({ err: error, msg: 'Request error' }) + res.writeHead(500, { 'Content-Type': 'text/plain' }) + res.end('Internal Server Error') + }) + } + + return { + registerNetworkHandler(network: string, handler: 
WebhookActivityHandler) { + networkHandlers.set(network, handler) + logger.info({ network, msg: 'Registered webhook handler' }) + }, + + unregisterNetworkHandler(network: string) { + networkHandlers.delete(network) + logger.info({ network, msg: 'Unregistered webhook handler' }) + }, + + start() { + if (server != null) { + logger.warn({ msg: 'Webhook server already running' }) + return + } + + const { webhookHost, webhookPort } = serverConfig + server = http.createServer(handleRequest) + server.listen(webhookPort, webhookHost) + logger.info({ + host: webhookHost, + port: webhookPort, + msg: 'Webhook server listening' + }) + }, + + stop() { + if (server != null) { + server.close() + server = null + logger.info({ msg: 'Webhook server stopped' }) + } + } + } +} diff --git a/src/util/authenticateUrl.ts b/src/util/authenticateUrl.ts new file mode 100644 index 0000000..4d62d1f --- /dev/null +++ b/src/util/authenticateUrl.ts @@ -0,0 +1,14 @@ +import { serverConfig } from '../serverConfig' +import { pickRandom } from './pickRandom' +import { replaceUrlParams } from './replaceUrlParams' +import { serviceKeysFromUrl } from './serviceKeys' + +export function authenticateUrl( + url: string, + keyName: string = 'apiKey' +): string { + const apiKeys = serviceKeysFromUrl(serverConfig.serviceKeys, url) + const apiKey = pickRandom(apiKeys) ?? '' + const authenticatedUrl = replaceUrlParams(url, { [keyName]: apiKey }) + return authenticatedUrl +} diff --git a/src/util/logger.ts b/src/util/logger.ts new file mode 100644 index 0000000..c272cd6 --- /dev/null +++ b/src/util/logger.ts @@ -0,0 +1,39 @@ +import pino from 'pino' + +export type Logger = pino.Logger + +// Single base logger instance +export const logger = pino({ + enabled: process.env.NODE_ENV !== 'test', + timestamp: pino.stdTimeFunctions.isoTime, + formatters: { + log(object) { + const { time, scope, ...rest } = object + return { + ...rest, + ...(time != null + ? { "Warning: 'time' field because it's reserved for Pino": time } + : {}), + ...(scope != null + ? { + "Warning: 'scope' field because it's reserved for logging context": scope + } + : {}) + } + } + } +}) + +/** + * Creates a scoped logger using Pino's .child() pattern. + * Adds a "scope" field to identify the logging context (e.g., "blockbook", "evmRpc", "socket"). + * + * @param scope - The scope identifier + * @param chainPluginId - Optional chain plugin ID (e.g., "ethereum", "arbitrum") + */ +export function makeLogger(scope: string, chainPluginId?: string): Logger { + return logger.child({ + scope, + ...(chainPluginId != null ? { chainPluginId } : {}) + }) +} diff --git a/src/util/pickRandom.ts b/src/util/pickRandom.ts index 7519311..028af3c 100644 --- a/src/util/pickRandom.ts +++ b/src/util/pickRandom.ts @@ -4,6 +4,6 @@ * @param arr - The array to pick from. * @returns A random element from the array. */ -export function pickRandom(arr: T[]): T { +export function pickRandom(arr: T[]): T | undefined { return arr[Math.floor(Math.random() * arr.length)] } diff --git a/src/util/replaceUrlParams.ts b/src/util/replaceUrlParams.ts new file mode 100644 index 0000000..14f0be8 --- /dev/null +++ b/src/util/replaceUrlParams.ts @@ -0,0 +1,15 @@ +/** + * Replaces {{paramName}} placeholders in a URL with supplied params. + * Returns the URL unchanged if no placeholders are found or params are not + * provided. 
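+ *
+ * For example (illustrative URL template and key):
+ *
+ * ```
+ * replaceUrlParams('wss://btcbook.nownodes.io/wss/{{apiKey}}', { apiKey: 'MY_KEY' })
+ * // => 'wss://btcbook.nownodes.io/wss/MY_KEY'
+ *
+ * // No matching placeholder, so the URL comes back unchanged:
+ * replaceUrlParams('https://example.com/api', { apiKey: 'MY_KEY' })
+ * // => 'https://example.com/api'
+ * ```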
+ */ +export function replaceUrlParams( + url: string, + params: Record +): string { + let result = url + for (const [key, value] of Object.entries(params)) { + result = result.replace(`{{${key}}}`, value) + } + return result +} diff --git a/src/util/scanAdapters/EtherscanV1ScanAdapter.ts b/src/util/scanAdapters/EtherscanV1ScanAdapter.ts index b9b9c7a..0de34e3 100644 --- a/src/util/scanAdapters/EtherscanV1ScanAdapter.ts +++ b/src/util/scanAdapters/EtherscanV1ScanAdapter.ts @@ -1,8 +1,9 @@ import { asArray, asObject, asString, asUnknown } from 'cleaners' import { serverConfig } from '../../serverConfig' -import { Logger } from '../../types' +import { Logger } from '../logger' import { pickRandom } from '../pickRandom' +import { serviceKeysFromUrl } from '../serviceKeys' import { ScanAdapter } from './scanAdapterTypes' export interface EtherscanV1ScanAdapterConfig { @@ -34,19 +35,26 @@ export function makeEtherscanV1ScanAdapter( }) // Use a random API URL: const url = pickRandom(urls) - const host = new URL(url).host - const apiKeys = serverConfig.serviceKeys[host] - if (apiKeys == null) { - logger.warn('No API key found for', host) + if (url == null) { + logger.error({ msg: 'No URLs for EtherscanV1ScanAdapter provided' }) + return false } - // Use a random API key: - const apiKey = apiKeys == null ? undefined : pickRandom(apiKeys) + const apiKeys = serviceKeysFromUrl(serverConfig.serviceKeys, url) + const apiKey = pickRandom(apiKeys) if (apiKey != null) { params.set('apikey', apiKey) + } else { + logger.warn({ url, msg: 'No API key found, proceeding without one' }) } const response = await fetch(`${url}/api?${params.toString()}`) if (response.status !== 200) { - logger.error('scanAddress error', response.status, response.statusText) + const text = await response.text().catch(() => '') + logger.error({ + status: response.status, + statusText: response.statusText, + responseText: text, + msg: 'scanAddress error' + }) return true } const dataRaw = await response.json() @@ -69,11 +77,11 @@ export function makeEtherscanV1ScanAdapter( } const tokenResponse = await fetch(`${url}/api?${tokenParams.toString()}`) if (tokenResponse.status !== 200) { - logger.error( - 'scanAddress tokenTx error', - tokenResponse.status, - tokenResponse.statusText - ) + logger.error({ + status: tokenResponse.status, + statusText: tokenResponse.statusText, + msg: 'scanAddress tokenTx error' + }) return false } const tokenDataRaw = await tokenResponse.json() diff --git a/src/util/scanAdapters/EtherscanV2ScanAdapter.ts b/src/util/scanAdapters/EtherscanV2ScanAdapter.ts index 68dfcd1..1cec606 100644 --- a/src/util/scanAdapters/EtherscanV2ScanAdapter.ts +++ b/src/util/scanAdapters/EtherscanV2ScanAdapter.ts @@ -1,8 +1,9 @@ import { asArray, asObject, asString, asUnknown } from 'cleaners' import { serverConfig } from '../../serverConfig' -import { Logger } from '../../types' +import { Logger } from '../logger' import { pickRandom } from '../pickRandom' +import { serviceKeysFromUrl } from '../serviceKeys' import { ScanAdapter } from './scanAdapterTypes' export interface EtherscanV2ScanAdapterConfig { @@ -36,26 +37,38 @@ export function makeEtherscanV2ScanAdapter( }) // Use a random API URL: const url = pickRandom(urls) - const host = new URL(url).host - const apiKeys = serverConfig.serviceKeys[host] - if (apiKeys == null) { - logger.warn('No API key found for', host) + if (url == null) { + logger.error({ msg: 'No URLs for EtherscanV2ScanAdapter provided' }) + return false } - // Use a random API key: - const apiKey = apiKeys == 
null ? undefined : pickRandom(apiKeys) + const apiKeys = serviceKeysFromUrl(serverConfig.serviceKeys, url) + const apiKey = pickRandom(apiKeys) if (apiKey != null) { params.set('apikey', apiKey) + } else { + logger.warn({ url, msg: 'No API key found, proceeding without one' }) } const response = await fetchEtherscanV2(url, params) if ('error' in response) { - logger.error( - 'scanAddress error', - response.httpStatus, - response.httpStatusText - ) + logger.error({ + status: response.httpStatus, + statusText: response.httpStatusText, + responseText: response.responseText, + msg: 'scanAddress error' + }) return true } - const transactionData = asResult(response.json) + let transactionData: ReturnType + try { + transactionData = asResult(response.json) + } catch (error) { + logger.error({ + err: error, + msg: 'scanAddress asResult cleaner error', + response: String(JSON.stringify(response.json)) + }) + throw error + } if (transactionData.status === '1' && transactionData.result.length > 0) { return true } @@ -75,11 +88,11 @@ export function makeEtherscanV2ScanAdapter( } const tokenResponse = await fetchEtherscanV2(url, tokenParams) if ('error' in tokenResponse) { - logger.error( - 'scanAddress tokenTx error', - tokenResponse.httpStatus, - tokenResponse.httpStatusText - ) + logger.error({ + status: tokenResponse.httpStatus, + statusText: tokenResponse.httpStatusText, + msg: 'scanAddress tokenTx error' + }) return false } const tokenData = asResult(tokenResponse.json) @@ -102,14 +115,24 @@ export function makeEtherscanV2ScanAdapter( } const internalResponse = await fetchEtherscanV2(url, internalParams) if ('error' in internalResponse) { - logger.error( - 'scanAddress internalTx error', - internalResponse.httpStatus, - internalResponse.httpStatusText - ) + logger.error({ + status: internalResponse.httpStatus, + statusText: internalResponse.httpStatusText, + msg: 'scanAddress internalTx error' + }) return false } - const internalData = asResult(internalResponse.json) + let internalData: ReturnType + try { + internalData = asResult(internalResponse.json) + } catch (error) { + logger.error({ + err: error, + msg: 'scanAddress asResult cleaner error', + response: String(JSON.stringify(internalResponse.json)) + }) + throw error + } if (internalData.status === '1' && internalData.result.length > 0) { return true } @@ -128,7 +151,12 @@ type EtherscanResult = json: unknown httpStatus: number } - | { error: boolean; httpStatus: number; httpStatusText: string } + | { + error: boolean + httpStatus: number + httpStatusText: string + responseText: string + } async function fetchEtherscanV2( url: string, @@ -136,10 +164,12 @@ async function fetchEtherscanV2( ): Promise { const response = await fetch(`${url}/v2/api?${params.toString()}`) if (response.status !== 200) { + const text = await response.text().catch(() => '') return { error: true, httpStatus: response.status, - httpStatusText: response.statusText + httpStatusText: response.statusText, + responseText: text } } diff --git a/src/util/serviceKeys.ts b/src/util/serviceKeys.ts new file mode 100644 index 0000000..d029e27 --- /dev/null +++ b/src/util/serviceKeys.ts @@ -0,0 +1,55 @@ +import { asArray, asObject, asString, Cleaner } from 'cleaners' + +/** + * The service keys map will map from a hostname to a list of API keys. 
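+ *
+ * Illustrative lookups via `serviceKeysFromUrl` below (hypothetical map and
+ * URLs):
+ *
+ * ```
+ * const keys: ServiceKeys = { 'api.example.com': ['key1'], 'example.com': ['key2'] }
+ * serviceKeysFromUrl(keys, 'https://api.example.com/path') // => ['key1'] (exact host)
+ * serviceKeysFromUrl(keys, 'https://www.example.com/path') // => ['key2'] (parent domain)
+ * serviceKeysFromUrl(keys, 'https://unrelated.org/path') // => [] (no match)
+ * ```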
+ *
+ * For example, the service keys map might look like this:
+ *
+ * ```
+ * {
+ *   'api.example.com:8080': ['key1', 'key2'],
+ *   'api.example.com': ['key3', 'key4'],
+ *   'example.com': ['key3', 'key4'],
+ * }
+ * ```
+ * More specific hostnames will take precedence over less specific ones (e.g.
+ * api.example.com:8080 over api.example.com over example.com). Note that the
+ * URL parser drops the default ports 80 and 443, so entries such as
+ * 'api.example.com:443' will never match an ordinary https URL.
+ */
+export interface ServiceKeys {
+  [domain: string]: string[]
+}
+export const asServiceKeys: Cleaner<ServiceKeys> = asObject(asArray(asString))
+
+/**
+ * Returns the service keys for the given URL by matching the host (with or
+ * without an explicit port) against the serviceKeys map. If the full hostname
+ * has no entry, parent domains are tried as well. For example,
+ * "https://api.example.com:8080" will first look for keys under
+ * "api.example.com:8080", then "api.example.com", then "example.com:8080",
+ * then "example.com". Returns the matching list of keys, or an empty array if
+ * no entry matches.
+ */
+export function serviceKeysFromUrl(
+  serviceKeys: ServiceKeys,
+  url: string
+): string[] {
+  const urlObj = new URL(url)
+  const fullDomain = urlObj.hostname
+  const domainParts = fullDomain.split('.')
+  let apiKeys: string[] = []
+  // Try matching at each domain level, from most specific (full) to least
+  for (let i = 0; i <= domainParts.length - 2; i++) {
+    const domain = domainParts.slice(i).join('.')
+    const candidateWithPort =
+      urlObj.port !== '' ? `${domain}:${urlObj.port}` : domain
+    apiKeys = serviceKeys[candidateWithPort]
+    if (apiKeys == null) {
+      apiKeys = serviceKeys[domain]
+    }
+    if (apiKeys != null) {
+      break
+    }
+  }
+  if (apiKeys != null) {
+    return apiKeys
+  }
+  return []
+}
diff --git a/test/hub.test.ts b/test/hub.test.ts index 05c33db..c42a43c 100644 --- a/test/hub.test.ts +++ b/test/hub.test.ts @@ -68,8 +68,9 @@ describe('AddressHub', function () { host, port }) - serverWs.on('connection', ws => { - hub.handleConnection(ws) + serverWs.on('connection', (ws, req) => { + const ip = req.socket.remoteAddress ??
'unknown' + hub.handleConnection(ws, ip) }) }) afterAll(() => { diff --git a/test/plugins/EtherscanV1ScanAdapter.test.ts b/test/plugins/EtherscanV1ScanAdapter.test.ts index 6bba48b..15b6830 100644 --- a/test/plugins/EtherscanV1ScanAdapter.test.ts +++ b/test/plugins/EtherscanV1ScanAdapter.test.ts @@ -1,6 +1,6 @@ import { afterAll, beforeAll, describe, expect, it, jest } from '@jest/globals' -import { Logger } from '../../src/types' +import { Logger } from '../../src/util/logger' import { makeEtherscanV1ScanAdapter } from '../../src/util/scanAdapters/EtherscanV1ScanAdapter' import { mswServer } from '../util/mswServer' @@ -14,11 +14,11 @@ describe('EtherscanV1ScanAdapter', function () { const TOKEN_TRANSACTION_HEIGHT = 22499360 // Mock logger for testing - const logger: Logger = { - log: jest.fn(), + const logger = ({ + info: jest.fn(), warn: jest.fn(), error: jest.fn() - } + } as unknown) as Logger const adapter = makeEtherscanV1ScanAdapter( { diff --git a/test/plugins/EtherscanV2ScanAdapter.test.ts b/test/plugins/EtherscanV2ScanAdapter.test.ts index 487bb5a..f76ac27 100644 --- a/test/plugins/EtherscanV2ScanAdapter.test.ts +++ b/test/plugins/EtherscanV2ScanAdapter.test.ts @@ -1,6 +1,6 @@ import { afterAll, beforeAll, describe, expect, it, jest } from '@jest/globals' -import { Logger } from '../../src/types' +import { Logger } from '../../src/util/logger' import { makeEtherscanV2ScanAdapter } from '../../src/util/scanAdapters/EtherscanV2ScanAdapter' import { mswServer } from '../util/mswServer' @@ -15,11 +15,11 @@ describe('EtherscanV2ScanAdapter', function () { const TOKEN_TRANSACTION_HEIGHT = 22499360 // Mock logger for testing - const logger: Logger = { - log: jest.fn(), + const logger = ({ + info: jest.fn(), warn: jest.fn(), error: jest.fn() - } + } as unknown) as Logger const adapter = makeEtherscanV2ScanAdapter( { diff --git a/test/plugins/alchemy.test.ts b/test/plugins/alchemy.test.ts new file mode 100644 index 0000000..2bcfd28 --- /dev/null +++ b/test/plugins/alchemy.test.ts @@ -0,0 +1,391 @@ +import { + afterEach, + beforeEach, + describe, + expect, + jest, + test +} from '@jest/globals' + +import { makeAlchemy } from '../../src/plugins/alchemy' +import { AddressPlugin } from '../../src/types/addressPlugin' +import { + AlchemyActivity, + AlchemyWebhookHandler, + WebhookActivityHandler +} from '../../src/util/alchemyWebhookHandler' + +// Mock the Alchemy Notify API +jest.mock('../../src/util/alchemyNotifyApi', () => { + return { + makeAlchemyNotifyApi: jest.fn(() => ({ + createWebhook: jest.fn().mockImplementation(async () => ({ + data: { + id: 'test-webhook-id', + network: 'ETH_MAINNET', + webhook_type: 'ADDRESS_ACTIVITY', + webhook_url: 'https://test.com/webhook', + is_active: true, + time_created: Date.now(), + signing_key: 'test-signing-key', + version: 'V2' + } + })), + updateWebhookAddresses: jest + .fn() + .mockImplementation(async () => undefined), + getWebhookAddresses: jest.fn().mockImplementation(async () => ({ + data: [], + pagination: { cursors: {}, total_count: 0 } + })), + deleteWebhook: jest.fn().mockImplementation(async () => undefined) + })) + } +}) + +// Mock server config +jest.mock('../../src/serverConfig', () => ({ + serverConfig: { + publicUri: 'https://test.edge.app', + alchemyWebhookSigningKey: 'test-signing-key', + webhookHost: '127.0.0.1', + webhookPort: 8010, + serviceKeys: { + 'dashboard.alchemy.com': ['test-api-key'] + } + } +})) + +describe('Alchemy plugin', () => { + const TEST_ADDRESS = '0xF5335367A46c2484f13abd051444E39775EA7b60' + const 
TEST_ADDRESS_LOWERCASE = TEST_ADDRESS.toLowerCase() + const TEST_SECOND_ADDRESS = '0x6B175474E89094C44Da98b954EedeAC495271d0F' + + let plugin: AddressPlugin + let mockWebhookHandler: AlchemyWebhookHandler + let registeredHandler: WebhookActivityHandler | null = null + + function callRegisteredHandler( + network: string, + activity: AlchemyActivity[] + ): void { + if (registeredHandler == null) { + throw new Error('registeredHandler is null') + } + registeredHandler(network, activity) + } + + beforeEach(() => { + jest.clearAllMocks() + jest.useFakeTimers() + registeredHandler = null + + // Create mock webhook handler + mockWebhookHandler = { + registerNetworkHandler: jest.fn( + (network: string, handler: WebhookActivityHandler) => { + registeredHandler = handler + } + ), + unregisterNetworkHandler: jest.fn(), + start: jest.fn(), + stop: jest.fn() + } + + plugin = makeAlchemy({ + pluginId: 'ethereum', + network: 'ETH_MAINNET', + webhookHandler: mockWebhookHandler + }) + }) + + afterEach(() => { + plugin.destroy?.() + jest.useRealTimers() + jest.clearAllMocks() + }) + + test('plugin instantiation', () => { + expect(plugin.pluginId).toBe('ethereum') + expect(mockWebhookHandler.registerNetworkHandler).toHaveBeenCalledWith( + 'ETH_MAINNET', + expect.any(Function) + ) + }) + + test('subscribe should return true', async () => { + const result = await plugin.subscribe(TEST_ADDRESS) + expect(result).toBe(true) + }) + + test('subscribe should be idempotent', async () => { + await plugin.subscribe(TEST_ADDRESS) + const result = await plugin.subscribe(TEST_ADDRESS) + expect(result).toBe(true) + }) + + test('unsubscribe should return true for subscribed address', async () => { + await plugin.subscribe(TEST_ADDRESS) + const result = await plugin.unsubscribe(TEST_ADDRESS) + expect(result).toBe(true) + }) + + test('unsubscribe should return false for non-subscribed address', async () => { + const result = await plugin.unsubscribe(TEST_ADDRESS) + expect(result).toBe(false) + }) + + test('webhook activity should trigger update event for from address', async () => { + await plugin.subscribe(TEST_ADDRESS) + + const updateHandler = jest.fn() + plugin.on('update', updateHandler) + + // Simulate incoming webhook activity + const activity: AlchemyActivity[] = [ + { + blockNum: '0x100', + hash: '0xabc', + fromAddress: TEST_ADDRESS_LOWERCASE, + toAddress: '0xdef', + value: 1.0, + erc721TokenId: null, + erc1155Metadata: null, + asset: 'ETH', + category: 'external', + rawContract: { + rawValue: '0x', + address: '', + decimals: 18 + }, + typeTraceAddress: null + } + ] + + // Call the registered handler + expect(registeredHandler).not.toBeNull() + callRegisteredHandler('ETH_MAINNET', activity) + + expect(updateHandler).toHaveBeenCalledWith({ + address: TEST_ADDRESS, + checkpoint: '256' // 0x100 in decimal + }) + }) + + test('webhook activity should trigger update event for to address', async () => { + await plugin.subscribe(TEST_ADDRESS) + + const updateHandler = jest.fn() + plugin.on('update', updateHandler) + + const activity: AlchemyActivity[] = [ + { + blockNum: '0x200', + hash: '0xdef', + fromAddress: '0xabc', + toAddress: TEST_ADDRESS_LOWERCASE, + value: 2.0, + erc721TokenId: null, + erc1155Metadata: null, + asset: 'ETH', + category: 'external', + rawContract: { + rawValue: '0x', + address: '', + decimals: 18 + }, + typeTraceAddress: null + } + ] + + callRegisteredHandler('ETH_MAINNET', activity) + + expect(updateHandler).toHaveBeenCalledWith({ + address: TEST_ADDRESS, + checkpoint: '512' // 0x200 in decimal + }) 
+ }) + + test('webhook activity should not trigger for unsubscribed addresses', async () => { + const updateHandler = jest.fn() + plugin.on('update', updateHandler) + + const activity: AlchemyActivity[] = [ + { + blockNum: '0x100', + hash: '0xabc', + fromAddress: TEST_ADDRESS_LOWERCASE, + toAddress: '0xdef', + value: 1.0, + erc721TokenId: null, + erc1155Metadata: null, + asset: 'ETH', + category: 'external', + rawContract: { + rawValue: '0x', + address: '', + decimals: 18 + }, + typeTraceAddress: null + } + ] + + callRegisteredHandler('ETH_MAINNET', activity) + + expect(updateHandler).not.toHaveBeenCalled() + }) + + test('multiple addresses should all receive updates', async () => { + await plugin.subscribe(TEST_ADDRESS) + await plugin.subscribe(TEST_SECOND_ADDRESS) + + const updateHandler = jest.fn() + plugin.on('update', updateHandler) + + const activity: AlchemyActivity[] = [ + { + blockNum: '0x300', + hash: '0xghi', + fromAddress: TEST_ADDRESS_LOWERCASE, + toAddress: TEST_SECOND_ADDRESS.toLowerCase(), + value: 1.0, + erc721TokenId: null, + erc1155Metadata: null, + asset: 'ETH', + category: 'external', + rawContract: { + rawValue: '0x', + address: '', + decimals: 18 + }, + typeTraceAddress: null + } + ] + + callRegisteredHandler('ETH_MAINNET', activity) + + expect(updateHandler).toHaveBeenCalledTimes(2) + expect(updateHandler).toHaveBeenCalledWith({ + address: TEST_ADDRESS, + checkpoint: '768' + }) + expect(updateHandler).toHaveBeenCalledWith({ + address: TEST_SECOND_ADDRESS, + checkpoint: '768' + }) + }) + + test('address normalization preserves original case', async () => { + const mixedCaseAddress = '0xAbCdEf1234567890AbCdEf1234567890AbCdEf12' + await plugin.subscribe(mixedCaseAddress) + + const updateHandler = jest.fn() + plugin.on('update', updateHandler) + + const activity: AlchemyActivity[] = [ + { + blockNum: '0x100', + hash: '0xabc', + fromAddress: mixedCaseAddress.toLowerCase(), + toAddress: '0xdef', + value: 1.0, + erc721TokenId: null, + erc1155Metadata: null, + asset: 'ETH', + category: 'external', + rawContract: { + rawValue: '0x', + address: '', + decimals: 18 + }, + typeTraceAddress: null + } + ] + + callRegisteredHandler('ETH_MAINNET', activity) + + // Should emit with original case + expect(updateHandler).toHaveBeenCalledWith({ + address: mixedCaseAddress, + checkpoint: '256' + }) + }) + + test('destroy should unregister handler and clear state', async () => { + await plugin.subscribe(TEST_ADDRESS) + + plugin.destroy?.() + + expect(mockWebhookHandler.unregisterNetworkHandler).toHaveBeenCalledWith( + 'ETH_MAINNET' + ) + }) + + test('ERC20 token transfer activity should trigger update', async () => { + await plugin.subscribe(TEST_ADDRESS) + + const updateHandler = jest.fn() + plugin.on('update', updateHandler) + + const activity: AlchemyActivity[] = [ + { + blockNum: '0x400', + hash: '0xtoken', + fromAddress: TEST_ADDRESS_LOWERCASE, + toAddress: '0xrecipient', + value: 100.0, + erc721TokenId: null, + erc1155Metadata: null, + asset: 'USDC', + category: 'erc20', + rawContract: { + rawValue: '0x5f5e100', + address: '0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48', + decimals: 6 + }, + typeTraceAddress: null + } + ] + + callRegisteredHandler('ETH_MAINNET', activity) + + expect(updateHandler).toHaveBeenCalledWith({ + address: TEST_ADDRESS, + checkpoint: '1024' + }) + }) + + test('internal transfer activity should trigger update', async () => { + await plugin.subscribe(TEST_ADDRESS) + + const updateHandler = jest.fn() + plugin.on('update', updateHandler) + + const activity: 
AlchemyActivity[] = [ + { + blockNum: '0x500', + hash: '0xinternal', + fromAddress: '0xcontract', + toAddress: TEST_ADDRESS_LOWERCASE, + value: 0.5, + erc721TokenId: null, + erc1155Metadata: null, + asset: 'ETH', + category: 'internal', + rawContract: { + rawValue: '0x', + address: '', + decimals: 18 + }, + typeTraceAddress: 'call_0_1' + } + ] + + callRegisteredHandler('ETH_MAINNET', activity) + + expect(updateHandler).toHaveBeenCalledWith({ + address: TEST_ADDRESS, + checkpoint: '1280' + }) + }) +}) diff --git a/test/plugins/blockbook.test.ts b/test/plugins/blockbook.test.ts index 63ce4e5..8555453 100644 --- a/test/plugins/blockbook.test.ts +++ b/test/plugins/blockbook.test.ts @@ -12,9 +12,9 @@ import WebSocket from 'ws' import { messageToString } from '../../src/messageToString' import { makeBlockbook } from '../../src/plugins/blockbook' -import { serverConfig } from '../../src/serverConfig' import { AddressPlugin } from '../../src/types/addressPlugin' import { blockbookProtocol } from '../../src/types/blockbookProtocol' +import { authenticateUrl } from '../../src/util/authenticateUrl' // Enable this for debug testing against a real server. It may break some tests. const USE_REAL_BLOCKBOOK_SERVER = false @@ -27,7 +27,7 @@ describe('blockbook plugin', function () { const host = 'localhost' const port = Math.floor(Math.random() * 1000 + 5000) const mockBlockbookUrl = USE_REAL_BLOCKBOOK_SERVER - ? `wss://btcbook.nownodes.io/wss/{nowNodesApiKey}` + ? authenticateUrl('wss://btcbook.nownodes.io/wss/{{apiKey}}') : `ws://${host}:${port}` const blockbookWsServer = new WebSocket.Server({ @@ -169,9 +169,7 @@ describe('blockbook plugin', function () { beforeEach(() => { plugin = makeBlockbook({ pluginId: 'test', - url: mockBlockbookUrl, - // For testing real blockbook server, we need to provide the API key - nowNodesApiKey: serverConfig.nowNodesApiKey + url: mockBlockbookUrl }) }) afterEach(() => { diff --git a/test/plugins/evmRpc.test.ts b/test/plugins/evmRpc.test.ts index 0ee25cb..a1edc29 100644 --- a/test/plugins/evmRpc.test.ts +++ b/test/plugins/evmRpc.test.ts @@ -83,12 +83,6 @@ describe('evmRpc plugin', function () { const mockUrl = 'https://ethereum.example.com/rpc' - const consoleSpy = { - log: jest.spyOn(console, 'log').mockImplementation(() => {}), - warn: jest.spyOn(console, 'warn').mockImplementation(() => {}), - error: jest.spyOn(console, 'error').mockImplementation(() => {}) - } - let plugin: AddressPlugin beforeAll(() => { @@ -147,10 +141,6 @@ describe('evmRpc plugin', function () { }) }) expect(mockClient.watchBlocks).toHaveBeenCalled() - // Verify onResponse was set up (if transport exists) - if (mockClient.transport != null) { - expect(mockClient.transport.onResponse).toHaveBeenCalled() - } }) test('subscribe should return true', async function () { @@ -429,20 +419,11 @@ describe('evmRpc plugin', function () { expect(result).toBe(false) }) - test('watchBlocks error handler should log errors', async function () { + test('watchBlocks error handler should handle errors gracefully', async function () { // Get the error handler that was passed to watchBlocks const errorHandler = mockClient.watchBlocks.mock.calls[0][0].onError - // Call the error handler - errorHandler(new Error('Test error')) - - // Check that the error was logged - // The log prefix includes the plugin ID and URL (which is picked randomly from urls array) - expect(consoleSpy.error).toHaveBeenCalled() - const errorCall = consoleSpy.error.mock.calls[0] - expect(errorCall[0]).toContain('test-evm (') - 
expect(errorCall[0]).toContain(mockUrl) - expect(errorCall[1]).toBe('watchBlocks error') - expect(errorCall[2]).toBeInstanceOf(Error) + // Call the error handler - should not throw + expect(() => errorHandler(new Error('Test error'))).not.toThrow() }) }) diff --git a/test/util/serviceKeys.test.ts b/test/util/serviceKeys.test.ts new file mode 100644 index 0000000..9a6ee41 --- /dev/null +++ b/test/util/serviceKeys.test.ts @@ -0,0 +1,265 @@ +import { describe, expect, test } from '@jest/globals' + +import { + asServiceKeys, + ServiceKeys, + serviceKeysFromUrl +} from '../../src/util/serviceKeys' + +describe('asServiceKeys', function () { + test('valid service keys object', function () { + const input = { + 'api.example.com': ['key1', 'key2'], + 'example.com': ['key3'] + } + const result = asServiceKeys(input) + expect(result).toEqual(input) + }) + + test('empty object is valid', function () { + const input = {} + const result = asServiceKeys(input) + expect(result).toEqual({}) + }) + + test('rejects non-string keys in array', function () { + const input = { + 'api.example.com': ['key1', 123] + } + expect(() => asServiceKeys(input)).toThrow() + }) + + test('rejects non-array values', function () { + const input = { + 'api.example.com': 'key1' + } + expect(() => asServiceKeys(input)).toThrow() + }) +}) + +describe('serviceKeysFromUrl', function () { + describe('exact hostname matching', function () { + test('matches exact hostname', function () { + const serviceKeys: ServiceKeys = { + 'api.example.com': ['key1', 'key2'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com/path' + ) + expect(result).toEqual(['key1', 'key2']) + }) + + test('matches hostname with explicit port', function () { + const serviceKeys: ServiceKeys = { + 'api.example.com:8080': ['portKey'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com:8080/path' + ) + expect(result).toEqual(['portKey']) + }) + + test('prefers hostname with port over hostname without port', function () { + const serviceKeys: ServiceKeys = { + 'api.example.com:8080': ['portKey'], + 'api.example.com': ['noPortKey'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com:8080/path' + ) + expect(result).toEqual(['portKey']) + }) + + test('falls back to hostname without port when port key not found', function () { + const serviceKeys: ServiceKeys = { + 'api.example.com': ['noPortKey'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com:8080/path' + ) + expect(result).toEqual(['noPortKey']) + }) + }) + + describe('subdomain matching', function () { + test('matches parent domain when subdomain not found', function () { + const serviceKeys: ServiceKeys = { + 'example.com': ['parentKey'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com/path' + ) + expect(result).toEqual(['parentKey']) + }) + + test('prefers more specific subdomain over parent domain', function () { + const serviceKeys: ServiceKeys = { + 'api.example.com': ['subdomainKey'], + 'example.com': ['parentKey'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com/path' + ) + expect(result).toEqual(['subdomainKey']) + }) + + test('matches deeply nested subdomains', function () { + const serviceKeys: ServiceKeys = { + 'example.com': ['rootKey'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://deep.nested.api.example.com/path' + ) + expect(result).toEqual(['rootKey']) + }) + + test('matches at correct 
subdomain level', function () { + const serviceKeys: ServiceKeys = { + 'nested.api.example.com': ['nestedKey'], + 'api.example.com': ['apiKey'], + 'example.com': ['rootKey'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://deep.nested.api.example.com/path' + ) + expect(result).toEqual(['nestedKey']) + }) + }) + + describe('subdomain with port matching', function () { + test('matches parent domain with port', function () { + const serviceKeys: ServiceKeys = { + 'example.com:8080': ['portKey'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com:8080/path' + ) + expect(result).toEqual(['portKey']) + }) + + test('prefers subdomain with port over parent domain with port', function () { + const serviceKeys: ServiceKeys = { + 'api.example.com:8080': ['subPortKey'], + 'example.com:8080': ['parentPortKey'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com:8080/path' + ) + expect(result).toEqual(['subPortKey']) + }) + + test('prefers subdomain with port over subdomain without port', function () { + const serviceKeys: ServiceKeys = { + 'api.example.com:8080': ['subPortKey'], + 'api.example.com': ['subKey'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com:8080/path' + ) + expect(result).toEqual(['subPortKey']) + }) + }) + + describe('no match scenarios', function () { + test('returns empty array when no keys match', function () { + const serviceKeys: ServiceKeys = { + 'other.com': ['otherKey'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com/path' + ) + expect(result).toEqual([]) + }) + + test('returns empty array for empty service keys', function () { + const serviceKeys: ServiceKeys = {} + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com/path' + ) + expect(result).toEqual([]) + }) + + test('does not match TLD only', function () { + const serviceKeys: ServiceKeys = { + com: ['tldKey'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com/path' + ) + expect(result).toEqual([]) + }) + }) + + describe('URL variations', function () { + test('works with http protocol', function () { + const serviceKeys: ServiceKeys = { + 'api.example.com': ['key1'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'http://api.example.com/path' + ) + expect(result).toEqual(['key1']) + }) + + test('works with query parameters', function () { + const serviceKeys: ServiceKeys = { + 'api.example.com': ['key1'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com/path?foo=bar' + ) + expect(result).toEqual(['key1']) + }) + + test('works with fragment', function () { + const serviceKeys: ServiceKeys = { + 'api.example.com': ['key1'] + } + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com/path#section' + ) + expect(result).toEqual(['key1']) + }) + + test('ignores default HTTPS port 443 (not included in URL.port)', function () { + const serviceKeys: ServiceKeys = { + 'api.example.com': ['key1'] + } + // Standard port 443 is not included in URL.port property + const result = serviceKeysFromUrl( + serviceKeys, + 'https://api.example.com:443/path' + ) + expect(result).toEqual(['key1']) + }) + + test('ignores default HTTP port 80 (not included in URL.port)', function () { + const serviceKeys: ServiceKeys = { + 'api.example.com': ['key1'] + } + // Standard port 80 is not included in URL.port property + const result = serviceKeysFromUrl( + serviceKeys, + 
'http://api.example.com:80/path' + ) + expect(result).toEqual(['key1']) + }) + }) +}) diff --git a/yarn.lock b/yarn.lock index 079420a..50e7517 100644 --- a/yarn.lock +++ b/yarn.lock @@ -689,6 +689,11 @@ resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.7.0.tgz#b139c81999c23e3c8d3c0a7234480e945920fc40" integrity sha512-AdY5wvN0P2vXBi3b29hxZgSFvdhdxPB9+f0B6s//P9Q8nibRWeA3cHm8UmLpio9ABigkVHJ5NMPk+Mz8VCCyrw== +"@pinojs/redact@^0.4.0": + version "0.4.0" + resolved "https://registry.yarnpkg.com/@pinojs/redact/-/redact-0.4.0.tgz#c3de060dd12640dcc838516aa2a6803cc7b2e9d6" + integrity sha512-k2ENnmBugE/rzQfEcdWHcCY+/FM3VLzH9cYEsbdsoqrvzAKRhUZeRNhAZvB8OitQJ1TBed3yqWtdjzS6wJKBwg== + "@scure/base@~1.2.2", "@scure/base@~1.2.4": version "1.2.4" resolved "https://registry.yarnpkg.com/@scure/base/-/base-1.2.4.tgz#002eb571a35d69bdb4c214d0995dff76a8dcd2a9" @@ -1083,6 +1088,11 @@ at-least-node@^1.0.0: resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== +atomic-sleep@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b" + integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ== + axios@^1.7.4: version "1.8.4" resolved "https://registry.yarnpkg.com/axios/-/axios-1.8.4.tgz#78990bb4bc63d2cae072952d374835950a82f447" @@ -3471,6 +3481,11 @@ object.values@^1.1.1: es-abstract "^1.18.0-next.1" has "^1.0.3" +on-exit-leak-free@^2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz#fed195c9ebddb7d9e4c3842f93f281ac8dadd3b8" + integrity sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA== + once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" @@ -3732,6 +3747,35 @@ pify@^3.0.0: resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= +pino-abstract-transport@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pino-abstract-transport/-/pino-abstract-transport-3.0.0.tgz#b21e5f33a297e8c4c915c62b3ce5dd4a87a52c23" + integrity sha512-wlfUczU+n7Hy/Ha5j9a/gZNy7We5+cXp8YL+X+PG8S0KXxw7n/JXA3c46Y0zQznIJ83URJiwy7Lh56WLokNuxg== + dependencies: + split2 "^4.0.0" + +pino-std-serializers@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/pino-std-serializers/-/pino-std-serializers-7.1.0.tgz#a7b0cd65225f29e92540e7853bd73b07479893fc" + integrity sha512-BndPH67/JxGExRgiX1dX0w1FvZck5Wa4aal9198SrRhZjH3GxKQUKIBnYJTdj2HDN3UQAS06HlfcSbQj2OHmaw== + +pino@^10.2.0: + version "10.2.0" + resolved "https://registry.yarnpkg.com/pino/-/pino-10.2.0.tgz#01d7f0fdabdb7bb31102a55770b6fe2dd3f139e8" + integrity sha512-NFnZqUliT+OHkRXVSf8vdOr13N1wv31hRryVjqbreVh/SDCNaI6mnRDDq89HVRCbem1SAl7yj04OANeqP0nT6A== + dependencies: + "@pinojs/redact" "^0.4.0" + atomic-sleep "^1.0.0" + on-exit-leak-free "^2.1.0" + pino-abstract-transport "^3.0.0" + pino-std-serializers "^7.0.0" + process-warning "^5.0.0" + quick-format-unescaped "^4.0.3" + real-require "^0.2.0" + safe-stable-stringify "^2.3.1" + sonic-boom "^4.0.1" + thread-stream "^4.0.0" + pirates@^4.0.1: version "4.0.1" resolved 
"https://registry.yarnpkg.com/pirates/-/pirates-4.0.1.tgz#643a92caf894566f91b2b986d2c66950a8e2fb87" @@ -3791,6 +3835,11 @@ pretty-format@^29.7.0: ansi-styles "^5.0.0" react-is "^18.0.0" +process-warning@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/process-warning/-/process-warning-5.0.0.tgz#566e0bf79d1dff30a72d8bbbe9e8ecefe8d378d7" + integrity sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA== + progress@^2.0.0: version "2.0.3" resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" @@ -3859,6 +3908,11 @@ querystringify@^2.1.1: resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== +quick-format-unescaped@^4.0.3: + version "4.0.4" + resolved "https://registry.yarnpkg.com/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz#93ef6dd8d3453cbc7970dd614fad4c5954d6b5a7" + integrity sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg== + react-is@^18.0.0: version "18.3.1" resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.3.1.tgz#e83557dc12eae63a99e003a46388b1dcbb44db7e" @@ -3890,6 +3944,11 @@ read-pkg@^3.0.0: normalize-package-data "^2.3.2" path-type "^3.0.0" +real-require@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/real-require/-/real-require-0.2.0.tgz#209632dea1810be2ae063a6ac084fee7e33fba78" + integrity sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg== + regexpp@^3.0.0, regexpp@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.1.0.tgz#206d0ad0a5648cffbdb8ae46438f3dc51c9f78e2" @@ -3988,6 +4047,11 @@ rxjs@^6.6.3: dependencies: tslib "^1.9.0" +safe-stable-stringify@^2.3.1: + version "2.5.0" + resolved "https://registry.yarnpkg.com/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz#4ca2f8e385f2831c432a719b108a3bf7af42a1dd" + integrity sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA== + semver-compare@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/semver-compare/-/semver-compare-1.0.0.tgz#0dee216a1c941ab37e9efb1788f6afc5ff5537fc" @@ -4158,6 +4222,13 @@ slice-ansi@^4.0.0: astral-regex "^2.0.0" is-fullwidth-code-point "^3.0.0" +sonic-boom@^4.0.1: + version "4.2.0" + resolved "https://registry.yarnpkg.com/sonic-boom/-/sonic-boom-4.2.0.tgz#e59a525f831210fa4ef1896428338641ac1c124d" + integrity sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww== + dependencies: + atomic-sleep "^1.0.0" + source-map-support@0.5.13: version "0.5.13" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" @@ -4197,6 +4268,11 @@ spdx-license-ids@^3.0.0: resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== +split2@^4.0.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/split2/-/split2-4.2.0.tgz#c9c5920904d148bab0b9f67145f245a86aadbfa4" + integrity sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg== + sprintf-js@~1.0.2: version "1.0.3" resolved 
"https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" @@ -4416,6 +4492,13 @@ thenify-all@^1.0.0: dependencies: any-promise "^1.0.0" +thread-stream@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/thread-stream/-/thread-stream-4.0.0.tgz#732f007c24da7084f729d6e3a7e3f5934a7380b7" + integrity sha512-4iMVL6HAINXWf1ZKZjIPcz5wYaOdPhtO8ATvZ+Xqp3BTdaqtAwQkNmKORqcIo5YkQqGXq5cwfswDwMqqQNrpJA== + dependencies: + real-require "^0.2.0" + through@^2.3.8: version "2.3.8" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5"