diff --git a/bin/cancel-pending-tx.js b/bin/cancel-pending-tx.js
index b5928d38..146ab4c5 100644
--- a/bin/cancel-pending-tx.js
+++ b/bin/cancel-pending-tx.js
@@ -6,7 +6,7 @@ import { ethers } from 'ethers'
 import { CoinType, newDelegatedEthAddress } from '@glif/filecoin-address'
 import pRetry from 'p-retry'
 
-import { createMeridianContract } from '../lib/ie-contract.js'
+import { provider } from '../lib/contracts.js'
 
 const {
   WALLET_SEED
@@ -17,8 +17,6 @@ const [, , tx] = process.argv
 assert(WALLET_SEED, 'WALLET_SEED required')
 assert(tx, 'Transaction hash must be provided as the first argument')
 
-const { provider } = await createMeridianContract()
-
 const signer = ethers.Wallet.fromPhrase(WALLET_SEED, provider)
 const walletDelegatedAddress = newDelegatedEthAddress(/** @type {any} */(signer.address), CoinType.MAIN).toString()
 console.log(
diff --git a/bin/dry-run.js b/bin/dry-run.js
index ec181b79..e265621f 100644
--- a/bin/dry-run.js
+++ b/bin/dry-run.js
@@ -12,7 +12,7 @@ import path from 'node:path'
 import { fileURLToPath } from 'node:url'
 import pg from 'pg'
 import { RoundData } from '../lib/round.js'
-import { createMeridianContract } from '../lib/ie-contract.js'
+import { createMeridianContract, provider } from '../lib/contracts.js'
 import * as SparkImpactEvaluator from '@filecoin-station/spark-impact-evaluator'
 
 /** @typedef {import('../lib/preprocess.js').Measurement} Measurement */
@@ -213,7 +213,7 @@ async function fetchMeasurementsAddedEvents (contractAddress, roundIndex) {
  * @param {bigint} roundIndex
  */
 async function fetchMeasurementsAddedFromChain (contractAddress, roundIndex) {
-  const { ieContract, provider } = await createMeridianContract(contractAddress)
+  const ieContract = createMeridianContract(contractAddress)
 
   console.log('Fetching MeasurementsAdded events from the ledger')
 
@@ -268,6 +268,6 @@ function isEventLog (logOrEventLog) {
  * @param {string} contractAddress
  */
 async function fetchLastRoundIndex (contractAddress) {
-  const { ieContract } = await createMeridianContract(contractAddress)
+  const ieContract = createMeridianContract(contractAddress)
   return await ieContract.currentRoundIndex()
 }
diff --git a/bin/fetch-recent-miner-measurements.js b/bin/fetch-recent-miner-measurements.js
index 58c8ffad..b022608f 100644
--- a/bin/fetch-recent-miner-measurements.js
+++ b/bin/fetch-recent-miner-measurements.js
@@ -9,7 +9,7 @@ import { mkdir, readFile, writeFile } from 'node:fs/promises'
 import os from 'node:os'
 import path from 'node:path'
 import pMap from 'p-map'
-import { createMeridianContract } from '../lib/ie-contract.js'
+import { createMeridianContract } from '../lib/contracts.js'
 import { fetchMeasurements, preprocess } from '../lib/preprocess.js'
 import { RoundData } from '../lib/round.js'
 import * as SparkImpactEvaluator from '@filecoin-station/spark-impact-evaluator'
@@ -140,7 +140,7 @@ console.error('Wrote human-readable summary for %s to %s', minerId, MINER_SUMMAR
  * @returns
  */
 async function getRecentMeasurementsAddedEvents (contractAddress, blocksToQuery = Number.POSITIVE_INFINITY) {
-  const { ieContract } = await createMeridianContract(contractAddress)
+  const ieContract = createMeridianContract(contractAddress)
 
   // max look-back period allowed by Glif.io is 2000 blocks (approx 16h40m)
   // in practice, requests for the last 2000 blocks are usually rejected,
diff --git a/bin/spark-evaluate.js b/bin/spark-evaluate.js
index bf5fdead..59ec74e1 100644
--- a/bin/spark-evaluate.js
+++ b/bin/spark-evaluate.js
@@ -9,7 +9,7 @@ import { recordTelemetry } from '../lib/telemetry.js'
 import { fetchMeasurements } from '../lib/preprocess.js'
 import { migrateWithPgConfig } from '../lib/migrate.js'
 import pg from 'pg'
-import { createMeridianContract } from '../lib/ie-contract.js'
+import { createMeridianContract, provider } from '../lib/contracts.js'
 import { startCancelStuckTxs } from '../lib/cancel-stuck-txs.js'
 
 const {
@@ -28,7 +28,7 @@ assert(WALLET_SEED, 'WALLET_SEED required')
 
 await migrateWithPgConfig({ connectionString: DATABASE_URL })
 
-const { ieContract, provider } = await createMeridianContract()
+const ieContract = await createMeridianContract()
 
 const signer = ethers.Wallet.fromPhrase(WALLET_SEED, provider)
 const walletDelegatedAddress = newDelegatedEthAddress(/** @type {any} */(signer.address), CoinType.MAIN).toString()
diff --git a/lib/contracts.js b/lib/contracts.js
new file mode 100644
index 00000000..0ea75283
--- /dev/null
+++ b/lib/contracts.js
@@ -0,0 +1,30 @@
+import { ethers } from 'ethers'
+import { rpcUrls, GLIF_TOKEN } from './config.js'
+import * as SparkImpactEvaluator from '@filecoin-station/spark-impact-evaluator'
+import * as SparkEvaluationsRecentParticipants from '@filecoin-station/spark-evaluations-recent-participants'
+
+export const provider = new ethers.FallbackProvider(rpcUrls.map(url => {
+  const fetchRequest = new ethers.FetchRequest(url)
+  fetchRequest.setHeader('Authorization', `Bearer ${GLIF_TOKEN}`)
+  return new ethers.JsonRpcProvider(fetchRequest, null, {
+    polling: true,
+    batchMaxCount: 10
+  })
+}))
+
+// Uncomment for troubleshooting
+// provider.on('debug', d => console.log('[ethers:debug %s] %s %o', new Date().toISOString().split('T')[1], d.action, d.payload ?? d.result))
+
+export const createMeridianContract = (contractAddress = SparkImpactEvaluator.ADDRESS) => {
+  return new ethers.Contract(
+    contractAddress,
+    SparkImpactEvaluator.ABI,
+    provider
+  )
+}
+
+export const recentParticipantsContract = new ethers.Contract(
+  SparkEvaluationsRecentParticipants.ADDRESS,
+  SparkEvaluationsRecentParticipants.ABI,
+  provider
+)
diff --git a/lib/ie-contract.js b/lib/ie-contract.js
deleted file mode 100644
index 94578e3d..00000000
--- a/lib/ie-contract.js
+++ /dev/null
@@ -1,25 +0,0 @@
-import { ethers } from 'ethers'
-import { rpcUrls, GLIF_TOKEN } from './config.js'
-import * as SparkImpactEvaluator from '@filecoin-station/spark-impact-evaluator'
-
-export const createMeridianContract = async (contractAddress = SparkImpactEvaluator.ADDRESS) => {
-  const provider = new ethers.FallbackProvider(rpcUrls.map(url => {
-    const fetchRequest = new ethers.FetchRequest(url)
-    fetchRequest.setHeader('Authorization', `Bearer ${GLIF_TOKEN}`)
-    return new ethers.JsonRpcProvider(fetchRequest, null, {
-      polling: true,
-      batchMaxCount: 10
-    })
-  }))
-
-  // Uncomment for troubleshooting
-  // provider.on('debug', d => console.log('[ethers:debug %s] %s %o', new Date().toISOString().split('T')[1], d.action, d.payload ?? d.result))
-
-  const ieContract = new ethers.Contract(
-    contractAddress,
-    SparkImpactEvaluator.ABI,
-    provider
-  )
-
-  return { ieContract, provider }
-}
diff --git a/lib/platform-stats.js b/lib/platform-stats.js
index 3fbe74aa..67f790d4 100644
--- a/lib/platform-stats.js
+++ b/lib/platform-stats.js
@@ -2,6 +2,7 @@ import assert from 'node:assert'
 import createDebug from 'debug'
 import * as Sentry from '@sentry/node'
 import pRetry from 'p-retry'
+import { recentParticipantsContract } from './contracts.js'
 
 const debug = createDebug('spark:platform-stats')
 
@@ -105,6 +106,22 @@ export const updateDailyStationStats = async (
  */
 export const updateDailyParticipants = async (pgClient, participantIds) => {
   debug('Updating daily participants, count=%s', participantIds.length)
+  ;(async () => {
+    const { rows } = await pgClient.query(`
+      SELECT participant_address FROM participants WHERE id = ANY($1::INT[])
+    `, [
+      participantIds
+    ])
+    const addresses = rows.map(row => row.participant_address)
+    try {
+      await recentParticipantsContract.set(new Date().getDay(), addresses)
+    } catch (err) {
+      console.error('Error updating spark-evaluations-recent-participants', err)
+      Sentry.captureException(err)
+    }
+  })()
+  // FIXME: Remove this part once `spark-evaluations-recent-participants` is in
+  // full use
   await pgClient.query(`
     INSERT INTO daily_participants (day, participant_id)
     SELECT now() as day, UNNEST($1::INT[]) AS participant_id
diff --git a/package-lock.json b/package-lock.json
index dcd5f30e..02c9ac60 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -6,6 +6,7 @@
     "": {
       "name": "spark-evaluate",
       "dependencies": {
+        "@filecoin-station/spark-evaluations-recent-participants": "^3.0.0",
         "@filecoin-station/spark-impact-evaluator": "^1.1.1",
         "@glif/filecoin-address": "^3.0.12",
         "@influxdata/influxdb-client": "^1.35.0",
@@ -286,6 +287,11 @@
         "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
       }
     },
+    "node_modules/@filecoin-station/spark-evaluations-recent-participants": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@filecoin-station/spark-evaluations-recent-participants/-/spark-evaluations-recent-participants-3.0.0.tgz",
+      "integrity": "sha512-t55W0+1gqgIaUW/GvNEGzfRegnn0oKYlfHpNfhj2hb0/5fDyphSwoeQYZkVD4BZg758gJci18uC5wnocUUx0uQ=="
+    },
     "node_modules/@filecoin-station/spark-impact-evaluator": {
       "version": "1.1.1",
       "resolved": "https://registry.npmjs.org/@filecoin-station/spark-impact-evaluator/-/spark-impact-evaluator-1.1.1.tgz",
diff --git a/package.json b/package.json
index 1015cc35..db018aea 100644
--- a/package.json
+++ b/package.json
@@ -18,6 +18,7 @@
     "typescript": "^5.6.2"
   },
   "dependencies": {
+    "@filecoin-station/spark-evaluations-recent-participants": "^3.0.0",
     "@filecoin-station/spark-impact-evaluator": "^1.1.1",
     "@glif/filecoin-address": "^3.0.12",
     "@influxdata/influxdb-client": "^1.35.0",
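
Reviewer note (not part of the patch): a minimal sketch of how a script under `bin/` consumes the new shared `lib/contracts.js` module after this change. The relative import path and the `WALLET_SEED` environment variable are assumptions carried over from the existing `bin/` scripts, and the round-index call mirrors `fetchLastRoundIndex` in `bin/dry-run.js`.

```js
// Sketch only: consuming the shared provider and contract factory from lib/contracts.js.
import { ethers } from 'ethers'
// Path assumes the caller lives in bin/, like the scripts touched by this patch.
import { provider, createMeridianContract } from '../lib/contracts.js'

// The provider is now a single module-level FallbackProvider, so wallets attach to it
// directly instead of awaiting createMeridianContract() to obtain one.
const signer = ethers.Wallet.fromPhrase(process.env.WALLET_SEED, provider)

// createMeridianContract() is synchronous in the new module; no `await` is needed
// to build the contract, only to call its methods.
const ieContract = createMeridianContract()
const currentRound = await ieContract.currentRoundIndex()
console.log('round %s, signer %s', currentRound, signer.address)
```

Centralising the provider this way lets every entry point share one FallbackProvider (and its Authorization header setup), and contract construction becomes synchronous, which is why the `{ ieContract, provider }` destructuring disappears from the call sites above.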