-
Notifications
You must be signed in to change notification settings - Fork 22
Expand file tree
/
Copy pathgenerate.js
More file actions
135 lines (112 loc) · 3.97 KB
/
generate.js
File metadata and controls
135 lines (112 loc) · 3.97 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
const fs = require('fs');
const fetch = require('node-fetch');
const path = require('path');
// Base URL for the Coin Metrics community API (v4).
const API_ROOT = 'https://community-api.coinmetrics.io/v4';
// Minimum ms between per-asset fetches when the THROTTLE env var is set
// (see the throttle check in the main loop).
const MIN_FETCH_INTERVAL = 600;
// Maximum number of rows requested per timeseries API call.
const PAGE_SIZE = 10000;
// Resolve after `ms` milliseconds; used to pace API requests.
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
// GET `url` (relative to API_ROOT) and resolve with the response's `data`
// payload. Retries forever, pausing 1s between attempts, on both non-2xx
// HTTP responses and thrown fetch/parse errors.
async function fetchDataWithRetry(url) {
  let attempt = 0;
  while (true) {
    attempt += 1;
    try {
      const response = await fetch(API_ROOT + url);
      if (response.ok) {
        const body = await response.json();
        return body.data;
      }
      console.warn(`Fetch attempt ${attempt} failed due to HTTP: ${response.status} ${response.statusText}. See cf-ray: ${response.headers.get('cf-ray')} Retrying...`);
    } catch (error) {
      console.warn(`Fetch attempt ${attempt}: ${error}`);
    }
    await sleep(1000);
  }
}
// Print an informational log line, prefixed with a level tag and ISO timestamp.
const emitInfo = (msg) => console.log(`[INF ${timestamp()}] ${msg}`);
// Forward to emitInfo only when the VERBOSE env var is set; otherwise a no-op.
const emitVerbose = (msg) => {
  if (!process.env.VERBOSE) {
    return;
  }
  emitInfo(msg);
};
// Log a fatal error (including the last HTTP status seen by apiFetch) and
// terminate the process with a non-zero exit code.
// Bug fix: the status parenthesis in the message was never closed.
const emitErrorAndDie = msg => {
  console.error(`[ERR ${timestamp()}] ${msg} (status: ${lastStatus})`);
  process.exit(1);
};
// HTTP status of the most recent apiFetch response, kept for error reporting
// in emitErrorAndDie.
let lastStatus = null;
// GET a catalog path (relative to API_ROOT) and resolve with its `data`
// payload. Resolves null on network or JSON-parse failure so callers can use
// their existing `== null` checks instead of crashing the process with an
// unhandled rejection.
const apiFetch = async path => fetch(API_ROOT + path)
  .then(res => {
    lastStatus = res.status;
    return res;
  })
  .then(res => res.json())
  .then(res => res.data)
  .catch(error => {
    console.warn(`apiFetch ${path}: ${error}`);
    return null;
  });
// Write `content` to <OUT>/<filename>.csv (OUT comes from the environment)
// and log roughly how much was written.
// Bug fix: the templates used `$(unknown)` (shell syntax, a literal string in
// JS) instead of `${filename}`, so every asset was written to the same
// "(unknown).csv" file, each write clobbering the last.
const fsWrite = (filename, content) => {
  const target = path.resolve(process.env.OUT, `${filename}.csv`);
  fs.writeFileSync(target, content);
  emitVerbose(`Written ~${content.length} bytes to ${filename}`);
};
// Return the IDs of this asset's metrics that are available at daily ('1d')
// frequency.
const metricIdsFromAssetInfo = (assetInfo) => {
  const dailyMetrics = assetInfo.metrics.filter((entry) =>
    entry.frequencies.some(({ frequency }) => frequency === '1d')
  );
  return dailyMetrics.map(({ metric }) => metric);
};
// Build one CSV row fragment: the data point's value for each metric in
// order, with '' substituted for missing (null/undefined) values.
const dataPointToCsvCols = (dataPoint, metricIds) => {
  const columns = [];
  for (const metricId of metricIds) {
    columns.push(dataPoint[metricId] ?? '');
  }
  return columns.join(',');
};
// Current time as an ISO-8601 string, used to prefix log lines.
const timestamp = () => {
  const now = new Date();
  return now.toISOString();
};
// Promise-based delay; functionally a duplicate of sleep(), kept because
// existing call sites use this name.
const wait = (interval) => {
  return new Promise((res) => {
    setTimeout(res, interval);
  });
};
// True when the asset should be skipped: it has no metrics list at all, or
// every metric it has is a ReferenceRate* metric (note: an empty metrics
// array also returns true, since every() is vacuously true).
const shouldOmitAsset = (assetInfo) => {
  if (assetInfo.metrics == null) {
    return true;
  }
  const isReferenceRate = (entry) => entry.metric.startsWith('ReferenceRate');
  return assetInfo.metrics.every(isReferenceRate);
};
// Entry point: fetch the asset catalog, index the assets worth exporting,
// then download each asset's daily metrics and write one CSV per asset.
(async () => {
  const assetIndex = {}; // asset id -> catalog info record
  const assetIds = [];
  // build an asset info index and a list of asset IDs
  const assetInfo = await apiFetch('/catalog/assets');
  if (assetInfo == null) {
    emitErrorAndDie('Failed to fetch asset info');
    process.exit(1); // NOTE(review): unreachable — emitErrorAndDie already exits
  }
  for (const info of assetInfo) {
    if (shouldOmitAsset(info)) {
      emitVerbose(`Omitting ${info.asset}`);
      continue;
    }
    assetIndex[info.asset] = info;
    assetIds.push(info.asset);
  }
  assetIds.sort();
  // retrieve metrics for every asset
  const totalAssets = assetIds.length;
  let fetchedAssets = 0;
  let lastFetchTime = 0; // epoch ms of the previous fetch, for throttling
  // The ASSETS env var (comma-separated ids) restricts the run to a subset.
  // NOTE(review): ids from ASSETS are not validated against assetIndex, so an
  // unknown id would make assetIndex[assetId] undefined below — confirm intent.
  for (const assetId of (process.env.ASSETS?.split(',') ?? assetIds)) {
    // Optional client-side throttle: with THROTTLE set, keep at least
    // MIN_FETCH_INTERVAL ms between successive fetches.
    const timeSinceLastFetch = Date.now() - lastFetchTime;
    if (process.env.THROTTLE && timeSinceLastFetch < MIN_FETCH_INTERVAL) {
      emitVerbose(`Pausing for ${MIN_FETCH_INTERVAL - timeSinceLastFetch}ms`);
      await wait(MIN_FETCH_INTERVAL - timeSinceLastFetch);
    }
    fetchedAssets++;
    const info = assetIndex[assetId];
    const metricIds = metricIdsFromAssetInfo(info);
    // fetch data
    emitInfo(`Fetching ${assetId} data with ${metricIds.length} metrics... (${fetchedAssets}/${totalAssets})`);
    lastFetchTime = Date.now();
    if (metricIds.length === 0) {
      emitInfo(`Skipping asset: ${assetId} because there is no available metrics`)
      continue
    }
    const seriesData = await fetchDataWithRetry(`/timeseries/asset-metrics/?assets=${encodeURIComponent(assetId)}&metrics=${encodeURIComponent(metricIds.join(','))}&page_size=${PAGE_SIZE}`);
    if (seriesData == null) {
      emitErrorAndDie(`Failed to fetch data for ${assetId}`);
    }
    // build CSV: header row of metric ids, then one row per data point with
    // the date portion of the timestamp (everything before 'T') first
    let csv = `time,${metricIds.join(',')}\n`;
    for (let dataPoint of seriesData) {
      csv += `${dataPoint.time.split('T')[0]},${dataPointToCsvCols(dataPoint, metricIds)}\n`;
    }
    // write to file
    fsWrite(assetId, csv);
    await sleep(750) // fixed pause between assets — presumably rate limiting; confirm
  }
  emitInfo('Finished');
})();