forked from simpleledger/SLPDB
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: bit.ts
More file actions
606 lines (544 loc) · 27.1 KB
/
bit.ts
File metadata and controls
606 lines (544 loc) · 27.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
import { Info, ChainSyncCheckpoint } from './info';
import { TNA, TNATxn } from './tna';
import { Config } from './config';
import { Db } from './db';
import { Query } from './query';
import pLimit = require('p-limit');
import * as pQueue from 'p-queue';
import * as zmq from 'zeromq';
import { BlockHeaderResult } from 'bitcoin-com-rest';
import { BITBOX } from 'bitbox-sdk';
import * as bitcore from 'bitcore-lib-cash';
import { Primatives } from 'slpjs';
import { RpcClient } from './rpc';
import { CacheSet, CacheMap } from './cache';
import { SlpGraphManager } from './slpgraphmanager';
import { Notifications } from './notifications';
import { SlpdbStatus } from './status';
import { GraphTxnDbo } from './interfaces';
// Promise-based delay helper used by the polling loops below (e.g. waitForFullNodeSync).
const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));
// bcash/bufio are used to deserialize raw block bytes fetched over RPC (see Bit.crawl()).
const Block = require('bcash/lib/primitives/block');
const BufferReader = require('bufio/lib/reader');
// Default BITBOX instance (no restURL) used only for local utilities such as Crypto.hash256.
const bitbox = new BITBOX();
/**
 * Discriminates which kind of sync pass produced a SyncCompletionInfo:
 * a mempool (unconfirmed) pass or a block (confirmed) pass.
 */
export enum SyncType {
    Mempool = 0,
    Block = 1,
}
/**
 * Keys for SyncCompletionInfo.filteredContent, partitioning synced
 * transactions by protocol family (plain BCH vs. SLP token transactions).
 */
export enum SyncFilterTypes {
    BCH = 0,
    SLP = 1,
}
/**
 * Result of a Bit.sync() pass: which kind of sync ran, plus the transactions
 * it admitted, grouped by filter type and keyed txid -> raw hex.
 */
export interface SyncCompletionInfo {
    syncType: SyncType;
    filteredContent: Map<SyncFilterTypes, Map<txid, txhex>>;
}
/** Map of txid -> per-transaction crawl output for one block (see Bit.crawl()). */
export type CrawlResult = Map<txid, CrawlTxnInfo>;
/** One crawled transaction: its TNA document plus the original raw hex. */
export interface CrawlTxnInfo {
    tnaTxn: TNATxn;
    txHex: string;
}
// Semantic aliases used throughout this file for readability of Map types.
export type txhex = string;   // raw transaction serialized as a hex string
export type txid = string;    // transaction id (big-endian hex)
//export type TransactionPool = Map<txid, txhex>;
/**
 * Bit ties the full node (JSON-RPC + ZMQ notifications) to the local SLP
 * state: it maintains an in-memory SLP mempool cache, crawls blocks into TNA
 * documents, detects double spends and chain reorgs, and forwards new
 * transaction/block events to the SlpGraphManager.  All ZMQ-driven work is
 * serialized through a single-concurrency queue to preserve event order.
 */
export class Bit {
    db: Db;                                  // persistence layer (confirmed/unconfirmed/token/graph collections)
    tna: TNA = new TNA();                    // converts bitcore transactions into TNA documents
    outsock = zmq.socket('pub');             // outgoing ZMQ publisher (bound only if Config.zmq.outgoing.enable)
    slpMempool = new Map<txid, txhex>();     // unconfirmed SLP transactions, txid -> raw hex
    txoDoubleSpendCache = new CacheMap<string, any>(20);   // recent double-spend details keyed by "txid:vout"
    doubleSpendCache = new CacheSet<string>(100);          // txids that lost a double-spend race
    slpMempoolIgnoreSetList = new CacheSet<string>(Config.core.slp_mempool_ignore_length); // txids already inspected and found non-SLP
    blockHashIgnoreSetList = new CacheSet<string>(10);     // de-dupes repeated ZMQ block-hash notifications
    _slpGraphManager!: SlpGraphManager;      // assigned externally before listenToZmq() runs
    _zmqItemQueue: pQueue<pQueue.DefaultAddOptions>;       // concurrency-1 queue serializing ZMQ events
    network!: string;                        // set in init(); 'regtest' short-circuits the sync wait
    notifications!: Notifications;           // ZMQ/gRPC subscription wrapper, created in listenToZmq()
    _spentTxoCache = new CacheMap<string, { txid: string, block: number|null }>(100000); // txo -> spending txid (block null while unconfirmed)

    /**
     * @param db open database handle; also binds the outgoing ZMQ pub socket
     *           when enabled in config.
     */
    constructor(db: Db) {
        this.db = db;
        this._zmqItemQueue = new pQueue({ concurrency: 1, autoStart: true });
        if(Config.zmq.outgoing.enable)
            this.outsock.bindSync('tcp://' + Config.zmq.outgoing.host + ':' + Config.zmq.outgoing.port);
    }

    /** Resolves the network name and blocks until the local node is synced. */
    async init() {
        this.network = await Info.getNetwork();
        await this.waitForFullNodeSync();
    }

    /**
     * Cheap pre-filter: returns true when the raw hex contains the SLP
     * OP_RETURN lokad marker (6a 04 "SLP\0").  Substring match may rarely
     * hit non-OP_RETURN data, so downstream parsing is still authoritative.
     */
    slpTransactionFilter(txnhex: string): boolean {
        if(txnhex.includes('6a04534c5000')) {
            return true;
        }
        return false;
    }

    /**
     * Polls until the local node's block height matches the public
     * rest.bitcoin.com network height (external dependency), logging
     * progress every 2s.  Skipped entirely on regtest.
     */
    private async waitForFullNodeSync() {
        let bitbox = this.network === 'mainnet' ? new BITBOX({ restURL: `https://rest.bitcoin.com/v2/` }) : new BITBOX({ restURL: `https://trest.bitcoin.com/v2/` });
        let isSyncd = false;
        let lastReportedSyncBlocks = 0;
        while (!isSyncd) {
            let info = await RpcClient.getBlockchainInfo();
            let chain = info.chain;
            if(chain === 'regtest') {
                break;
            }
            let syncdBlocks = info.blocks;
            let networkBlocks = (await bitbox.Blockchain.getBlockchainInfo()).blocks;
            isSyncd = syncdBlocks === networkBlocks ? true : false;
            if (syncdBlocks !== lastReportedSyncBlocks)
                console.log("[INFO] Waiting for bitcoind to sync with network ( on block", syncdBlocks, "of", networkBlocks, ")");
            else
                console.log("[WARN] bitcoind sync status did not change, check your bitcoind network connection.");
            lastReportedSyncBlocks = syncdBlocks;
            await sleep(2000);
        }
    }

    /** Current block height from the full node RPC; rethrows after logging. */
    async requestheight(): Promise<number> {
        try{
            return await RpcClient.getBlockCount();
        } catch(err) {
            console.log('Check your RPC connection. Could not get height from full node rpc call.')
            throw err;
        }
    }

    /** Returns a parsed transaction from the SLP mempool cache, or null if absent. */
    async getSlpMempoolTransaction(txid: string): Promise<bitcore.Transaction|null> {
        if(this.slpMempool.has(txid)) {
            return new bitcore.Transaction(this.slpMempool.get(txid)!);
        }
        return null;
    }

    /**
     * Central intake for a (possibly new) mempool transaction.
     * - Short-circuits if the txid is already known (SLP cache or ignore list).
     * - Fetches raw hex over RPC when not provided.
     * - Detects double spends against _spentTxoCache and evicts the losing
     *   txid from every collection/cache it may occupy.
     * - Finally classifies the txn as SLP (cached) or non-SLP (ignored).
     * @returns isSlp/added flags describing how the txn was classified.
     */
    async handleMempoolTransaction(txid: string, txhex?: string): Promise<{ isSlp: boolean, added: boolean }> {
        if(this.slpMempool.has(txid))
            return { isSlp: true, added: false };
        if(this.slpMempoolIgnoreSetList.has(txid))
            return { isSlp: false, added: false };
        if(!txhex) {
            try {
                txhex = <string>await RpcClient.getRawTransaction(txid);
            } catch(err) {
                // NOTE(review): interpolates txhex (undefined on this path) and
                // misspells "transaction" — likely meant ${txid}; confirm before fixing.
                console.log(`[ERROR] Could not find tranasaction ${txhex} in handleMempoolTransaction: ${err}`);
                return { isSlp: false, added: false }
            }
        }
        let txnBuf = Buffer.from(txhex, 'hex');
        RpcClient.loadTxnIntoCache(txid, txnBuf);
        // check for double spending of inputs, if found delete double spent txid from the mempool
        // TODO: Need to test how this will work with BCHD!
        let inputTxos = Primatives.Transaction.parseFromBuffer(txnBuf).inputs;
        let txidToDelete: string[] = [];
        inputTxos.forEach(input => {
            let txo = `${input.previousTxHash}:${input.previousTxOutIndex}`
            if (this._spentTxoCache.has(txo)) {
                let doubleSpentTxid = this._spentTxoCache.get(txo)!.txid;
                if (doubleSpentTxid !== txid) {
                    // The new txn spends a txo we already saw spent: evict the
                    // earlier spender everywhere (deletes are deliberately not awaited).
                    console.log(`[INFO] Detected double spent ${txo} --> original: ${doubleSpentTxid}, current: ${txid}`);
                    this.slpMempool.delete(doubleSpentTxid);
                    RpcClient.transactionCache.delete(doubleSpentTxid);
                    this.db.unconfirmedDelete(doubleSpentTxid); // no need to await
                    this.db.confirmedDelete(doubleSpentTxid); // no need to await
                    if(this._slpGraphManager._tokens.has(doubleSpentTxid)) {
                        // The evicted txn was itself a token genesis: drop the whole token.
                        this._slpGraphManager._tokens.delete(doubleSpentTxid);
                        this.db.tokenDelete(doubleSpentTxid); // no need to await
                        this.db.graphDelete(doubleSpentTxid); // no need to await
                        this.db.addressDelete(doubleSpentTxid); // no need to await
                        this.db.utxoDelete(doubleSpentTxid); // no need to await
                    } else {
                        txidToDelete.push(doubleSpentTxid);
                    }
                    let date = new Date();
                    this.txoDoubleSpendCache.set(txo, { originalTxid: doubleSpentTxid, current: txid, time: { utc: date.toUTCString(), unix: Math.floor(date.getTime()/1000) }});
                    this.doubleSpendCache.push(doubleSpentTxid);
                    SlpdbStatus.doubleSpendHistory = Array.from(this.txoDoubleSpendCache.toMap()).map(v => { return { txo: v[0], details: v[1]}});
                }
            }
            if (!txo.startsWith('0'.repeat(64))) { // ignore coinbase
                this._spentTxoCache.set(txo, { txid, block: null });
            }
        });
        // Remove evicted txids from their token graphs and remember which
        // tokens need their statistics refreshed.
        let tokenIdToUpdate= new Set<string>();
        if(txidToDelete.length > 0) {
            for (let i = 0; i < txidToDelete.length; i++) {
                let g: GraphTxnDbo|null = await this.db.graphTxnFetch(txidToDelete[i]);
                if(g && this._slpGraphManager._tokens.has(g.tokenDetails.tokenIdHex)) {
                    let t = this._slpGraphManager._tokens.get(g.tokenDetails.tokenIdHex);
                    if(t!._graphTxns.has(txidToDelete[i])) {
                        t!._graphTxns.delete(txidToDelete[i]);
                        // NOTE(review): this adds the deleted txid, but the forEach
                        // below looks tokens up by this value via _tokens.has(tokenId);
                        // g.tokenDetails.tokenIdHex appears intended — confirm.
                        tokenIdToUpdate.add(txidToDelete[i])
                    }
                }
            }
        }
        tokenIdToUpdate.forEach(tokenId => {
            if (this._slpGraphManager._tokens.has(tokenId)) {
                this._slpGraphManager._tokens.get(tokenId)!.UpdateStatistics(); // no need to await
            }
        });
        if(this.slpTransactionFilter(txhex)) {
            this.slpMempool.set(txid, txhex);
            return { isSlp: true, added: true };
        } else {
            this.slpMempoolIgnoreSetList.push(txid);
        }
        return { isSlp: false, added: false };
    }

    /** Drops a txn from the SLP mempool cache and the unconfirmed collection
     *  (the db delete is fire-and-forget). */
    async removeMempoolTransaction(txid: string) {
        this.slpMempool.delete(txid);
        this.db.unconfirmedDelete(txid);
    }

    /**
     * Syncs the SLP mempool cache with the node, then converts every cached
     * txn into a TNA document (RPC-rate-limited via p-limit).
     */
    async requestSlpMempool(): Promise<TNATxn[]> {
        try {
            await this.syncSlpMempool();
            let tasks: any[] = [];
            const limit = pLimit(Config.rpc.limit);
            let self = this;
            this.slpMempool.forEach((txhex, txid, map) => {
                tasks.push(limit(async function() {
                    let content = <bitcore.Transaction>(await self.getSlpMempoolTransaction(txid));
                    return self.tna.fromTx(content, { network: self.network });
                }))
            })
            let res = await Promise.all(tasks);
            return res;
        } catch(err) {
            console.log("An unknown error occurred while processing mempool transactions.");
            throw err;
        }
    }

    /** Sequential (awaited) forEach — used where order matters, unlike Array.forEach. */
    async asyncForEach(array: any[], callback: Function) {
        for (let index = 0; index < array.length; index++) {
            await callback(array[index], index, array);
        }
    }

    /** Reconciles the SLP mempool cache with the node's current mempool. */
    async syncSlpMempool() {
        let currentBchMempoolList = await RpcClient.getRawMemPool();
        console.log('[INFO] BCH mempool txs =', currentBchMempoolList.length);
        // Remove cached txs not in the mempool.
        this.removeExtraneousMempoolTxns();
        // Add SLP txs to the mempool not in the cache.
        let cachedSlpMempoolTxs = Array.from(this.slpMempool.keys());
        await this.asyncForEach(currentBchMempoolList, async (txid: string) => cachedSlpMempoolTxs.includes(txid) ? null : await this.handleMempoolTransaction(txid) );
        console.log('[INFO] SLP mempool txs =', this.slpMempool.size);
    }

    /**
     * Fetches and parses one block, producing TNA documents for its SLP
     * transactions.
     * @param block_index block height to crawl
     * @param triggerSlpProcessing when true (live ZMQ block handling), SLP
     *        txns not seen in the mempool are processed immediately; when
     *        false (startup sync) they are batched via the rate limiter.
     * @returns map txid -> crawl info, or null if the block cannot be fetched.
     */
    async crawl(block_index: number, triggerSlpProcessing: boolean): Promise<CrawlResult|null> {
        let result = new Map<txid, CrawlTxnInfo>();
        let block_content: BlockHeaderResult;
        try {
            block_content = await RpcClient.getBlockInfo({ index: block_index });
        } catch(_) {
            return null;
        }
        let block_hash = block_content.hash;
        let block_time = block_content.time;
        if (block_content) {
            console.log('[INFO] Crawling block', block_index, 'hash:', block_hash);
            let tasks: Promise<any>[] = [];
            const limit = pLimit(Config.rpc.limit);
            const self = this;
            // Parse the raw block locally (bcash) instead of issuing one RPC per txn.
            let blockHex = <string>await RpcClient.getRawBlock(block_content.hash);
            let block = Block.fromReader(new BufferReader(Buffer.from(blockHex, 'hex')));
            for(let i=1; i < block.txs.length; i++) { // skip coinbase with i=1
                let txnhex = block.txs[i].toRaw().toString('hex');
                if(this.slpTransactionFilter(txnhex) && !this.slpMempool.has(block.txs[i].txid())) {
                    // This is used when SLP transactions are broadcasted for first time with a block
                    if(triggerSlpProcessing) {
                        console.log("SLP transaction not in mempool:", block.txs[i].txid());
                        await this.handleMempoolTransaction(block.txs[i].txid(), txnhex);
                        let syncResult = await Bit.sync(this, 'mempool', block.txs[i].txid());
                        this._slpGraphManager.onTransactionHash!(syncResult!);
                    }
                    // This is used during startup block sync
                    else {
                        tasks.push(limit(async function() {
                            try {
                                let txn: bitcore.Transaction = new bitcore.Transaction(txnhex);
                                let t: TNATxn = await self.tna.fromTx(txn, { network: self.network });
                                result.set(txn.hash, { txHex: txnhex, tnaTxn: t })
                                t.blk = {
                                    h: block_hash,
                                    i: block_index,
                                    t: block_time
                                };
                                return t;
                            } catch(err) {
                                console.log('[Error] crawl error:', err.message);
                                throw err;
                            }
                        }))
                    }
                }
                // Txn already tracked as unconfirmed: reuse its stored TNA doc
                // if available and just attach the block info.
                if(this.slpMempool.has(block.txs[i].txid())) {
                    console.log("[INFO] Mempool has txid", block.txs[i].txid());
                    tasks.push(limit(async function() {
                        let t: TNATxn|null = await self.db.unconfirmedFetch(block.txs[i].txid());
                        if(!t) {
                            let txn: bitcore.Transaction = new bitcore.Transaction(txnhex);
                            t = await self.tna.fromTx(txn, { network: self.network });
                        }
                        t.blk = {
                            h: block_hash,
                            i: block_index,
                            t: block_time
                        };
                        result.set(block.txs[i].txid(), { txHex: txnhex, tnaTxn: t });
                        return t;
                    }));
                }
            }
            let btxs = (await Promise.all(tasks)).filter(i => i);
            console.log('[INFO] Block', block_index, 'processed :', block.txs.length, 'BCH txs |', btxs.length, 'SLP txs');
            return result;
        } else {
            return null;
        }
    }

    /**
     * Wires ZMQ (or gRPC) notifications into the serialized item queue:
     * new blocks trigger a block sync, raw txns are classified and, if SLP,
     * mempool-synced and forwarded to the graph manager.
     */
    listenToZmq() {
        let sync = Bit.sync;
        this._slpGraphManager._TnaQueue = this._zmqItemQueue;
        let self = this;
        let onBlockHash = function(blockHash: Buffer) {
            SlpdbStatus.updateTimeIncomingBlockZmq();
            self._zmqItemQueue.add(async function() {
                let hash = blockHash.toString('hex');
                // Ignore duplicate notifications for the same block hash.
                if(self.blockHashIgnoreSetList.has(hash)) {
                    console.log('[ZMQ-SUB] Block message ignored:', hash);
                    return;
                }
                self.blockHashIgnoreSetList.push(hash);
                console.log('[ZMQ-SUB] New block found:', hash);
                await sync(self, 'block', hash);
                if(!self._slpGraphManager.zmqPubSocket) {
                    self._slpGraphManager.zmqPubSocket = self.outsock;
                }
                if(self._slpGraphManager.onBlockHash) {
                    self._slpGraphManager.onBlockHash!(hash!);
                }
            });
        }
        let onRawTxn = function(message: Buffer) {
            SlpdbStatus.updateTimeIncomingTxnZmq();
            self._zmqItemQueue.add(async function() {
                let rawtx = message.toString('hex');
                // double-SHA256 of the raw txn, byte-reversed to the conventional txid.
                let hash = Buffer.from(bitbox.Crypto.hash256(message).toJSON().data.reverse()).toString('hex');
                if((await self.handleMempoolTransaction(hash, rawtx)).added) {
                    console.log('[ZMQ-SUB] New unconfirmed transaction added:', hash);
                    let syncResult = await sync(self, 'mempool', hash);
                    if(!self._slpGraphManager.zmqPubSocket)
                        self._slpGraphManager.zmqPubSocket = self.outsock;
                    if(syncResult && self._slpGraphManager.onTransactionHash) {
                        self._slpGraphManager.onTransactionHash!(syncResult);
                    }
                } else {
                    console.log('[INFO] Transaction ignored:', hash);
                }
            })
        }
        this.notifications = new Notifications({
            onRawTxnCb: onRawTxn,
            onBlockHashCb: onBlockHash,
            useGrpc: Boolean(Config.grpc.url)
        })
        console.log('[INFO] Listening for blockchain events...');
    }

    // This method is called at the end of processing each block
    /** Re-runs collection updates for confirmed txns still missing SLP metadata. */
    async handleConfirmedTxnsMissingSlpMetadata() {
        let missing = await Query.queryForConfirmedMissingSlpMetadata();
        if(missing) {
            await this.asyncForEach(missing, async (txid:string) => {
                await this._slpGraphManager.updateTxnCollections(txid);
            })
        }
    }

    /**
     * Processes any node-mempool txns we have not yet handled.
     * @param currentBchMempoolList optional pre-fetched mempool txid list
     * @param recursive when true, keeps re-checking until no unknown txids remain
     * @param log suppressed on recursive calls to avoid log spam
     */
    async checkForMissingMempoolTxns(currentBchMempoolList?: string[], recursive=false, log=true) {
        if(!currentBchMempoolList)
            currentBchMempoolList = await RpcClient.getRawMemPool();
        // add missing SLP transactions and process
        await this.asyncForEach(currentBchMempoolList, async (txid: string) => {
            if((await this.handleMempoolTransaction(txid)).added) {
                let syncResult = await Bit.sync(this, 'mempool', txid, this.slpMempool.get(txid));
                this._slpGraphManager.onTransactionHash!(syncResult!);
            }
        });
        if(recursive) {
            let residualMempoolList = (await RpcClient.getRawMemPool()).filter(id => !this.slpMempoolIgnoreSetList.has(id) && !Array.from(this.slpMempool.keys()).includes(id))
            if(residualMempoolList.length > 0)
                await this.checkForMissingMempoolTxns(residualMempoolList, true, false)
        }
        if(log) {
            console.log('[INFO] BCH mempool txn count:', (await RpcClient.getRawMemPool()).length);
            console.log("[INFO] SLP mempool txn count:", this.slpMempool.size);
        }
    }

    // async checkCurrentBlockHeight() {
    //     //let ldb_block = await Info.getBlockCheckpoint();
    //     let rpc_block = await this.rpc.getBlockCount();
    //     if(rpc_block > this.lastBlockProcessing) {
    //     }
    // }

    /** Evicts cached SLP txns that are no longer present in the node's mempool. */
    async removeExtraneousMempoolTxns() {
        let currentBchMempoolList = await RpcClient.getRawMemPool();
        // remove extraneous SLP transactions no longer in the mempool
        // iterate over a copy since removeMempoolTransaction mutates slpMempool
        let cacheCopyForRemovals = new Map(this.slpMempool);
        let txids = cacheCopyForRemovals.keys()
        for(let i = 0; i < cacheCopyForRemovals.size; i++) {
            let txid = txids.next().value
            if(!currentBchMempoolList.includes(txid)) {
                await this.removeMempoolTransaction(txid)
            }
        }
    }

    /**
     * Main sync entry point (static so ZMQ closures can capture it cheaply).
     * type 'block': crawls from the last checkpoint up to the current tip,
     *   handling reorgs and updating per-height checkpoints; returns the
     *   (empty) result only when the tip was fully reached, else null.
     * type 'mempool': inserts a single unconfirmed SLP txn (identified by
     *   `hash`, with `txhex` required if it is not already cached).
     */
    static async sync(self: Bit, type: string, hash?: string, txhex?: string): Promise<SyncCompletionInfo|null> {
        let result: SyncCompletionInfo;
        if (type === 'block') {
            result = { syncType: SyncType.Block, filteredContent: new Map<SyncFilterTypes, Map<txid, txhex>>() }
            try {
                // With a hash we resume from the stored checkpoint; without one
                // (initial startup) we start from the configured 'from' height.
                let lastCheckpoint = hash ? <ChainSyncCheckpoint>await Info.getBlockCheckpoint() : <ChainSyncCheckpoint>await Info.getBlockCheckpoint((await Info.getNetwork()) === 'mainnet' ? Config.core.from : Config.core.from_testnet);
                lastCheckpoint = await Bit.checkForBlockReorg(lastCheckpoint);
                let currentHeight: number = await self.requestheight();
                for(let index: number = lastCheckpoint.height + 1; index <= currentHeight; index++) {
                    console.time('[PERF] RPC END ' + index);
                    let requireSlpData = hash ? true : false;
                    let content = <CrawlResult>(await self.crawl(index, requireSlpData));
                    console.timeEnd('[PERF] RPC END ' + index);
                    console.time('[PERF] DB Insert ' + index);
                    if(content) {
                        let array = Array.from(content.values()).map(c => c.tnaTxn);
                        await self.db.confirmedReplace(array, requireSlpData, index);
                        array.forEach(tna => {
                            // fire-and-forget: async removal is not awaited here
                            self.removeMempoolTransaction(tna.tx.h);
                        });
                    }
                    // Keep only a rolling window of ~100 checkpoint hashes.
                    if (index - 100 > 0) {
                        await Info.deleteBlockCheckpointHash(index - 100);
                    }
                    try {
                        await Info.updateBlockCheckpoint(index, await RpcClient.getBlockHash(index));
                    } catch(_) {
                        // getBlockHash failed — likely a reorg mid-sync; re-anchor and retry.
                        lastCheckpoint = await Bit.checkForBlockReorg(lastCheckpoint);
                        index = lastCheckpoint.height;
                        continue;
                    }
                    console.timeEnd('[PERF] DB Insert ' + index);
                    // re-check current height in case it was updated during crawl()
                    currentHeight = await self.requestheight();
                }
                // clear mempool and synchronize
                if (lastCheckpoint.height < currentHeight && hash) {
                    await self.checkForMissingMempoolTxns();
                    await self.removeExtraneousMempoolTxns();
                    await self.handleConfirmedTxnsMissingSlpMetadata();
                }
                if (lastCheckpoint.height === currentHeight) {
                    return result;
                } else {
                    return null;
                }
            } catch (e) {
                console.log('[ERROR] block sync Error');
                throw e;
            }
        } else if (type === 'mempool') {
            result = { syncType: SyncType.Mempool, filteredContent: new Map<SyncFilterTypes, Map<txid, txhex>>() }
            if (hash) {
                let txn: bitcore.Transaction|null = await self.getSlpMempoolTransaction(hash);
                if(!txn && !self.slpMempoolIgnoreSetList.has(hash)) {
                    if(!txhex)
                        throw Error("Must provide 'txhex' if txid is not in the SLP mempool")
                    if(self.slpTransactionFilter(txhex))
                        txn = new bitcore.Transaction(txhex);
                }
                if(txn) {
                    let content: TNATxn = await self.tna.fromTx(txn, { network: self.network });
                    try {
                        await self.db.unconfirmedInsert(content);
                        console.log("[INFO] SLP mempool transaction added: ", hash);
                    } catch (e) {
                        // 11000 = Mongo duplicate-key: the txn is already stored.
                        if (e.code == 11000) {
                            console.log('[WARN] Mempool item already exists:', content);
                            //await self.db.mempoolreplace(content);
                        } else {
                            console.log('[ERROR] Mempool sync ERR:', e, content);
                            throw e;
                        }
                    }
                    let pool = new Map<txid, txhex>();
                    pool.set(hash, txn.toString());
                    result.filteredContent.set(SyncFilterTypes.SLP, pool)
                } else {
                    console.log("[INFO] Skipping non-SLP transaction:", hash);
                }
                return result;
            }
        }
        return null;
    }

    /**
     * Detects and resolves chain reorgs around the stored checkpoint.
     * Phase 1: walk back until a height the node still has a block hash for.
     * Phase 2: walk back until our stored previous-block hash matches the
     * node's previous-block hash.  Both phases abort after 100 rollbacks.
     * @returns the (possibly rolled-back) checkpoint with hadReorg flag.
     */
    static async checkForBlockReorg(lastCheckpoint: ChainSyncCheckpoint): Promise<ChainSyncCheckpoint> {
        // first, find a height with a block hash - should normallly be found on first try, otherwise rollback
        let from = (await Info.getNetwork()) === 'mainnet' ? Config.core.from : Config.core.from_testnet;
        let hadReorg = false;
        let actualHash: string|null = null;
        let maxRollback = 100;
        let rollbackCount = 0;
        while(!actualHash) {
            try {
                console.log(`[INFO] Checking for reorg for ${lastCheckpoint.height}`);
                actualHash = await RpcClient.getBlockHash(lastCheckpoint.height);
                console.log(`[INFO] Confirmed actual block hash: ${actualHash} at ${lastCheckpoint.height}`);
            } catch (err) {
                if(lastCheckpoint.height > from) {
                    console.log(`[WARN] Missing actual hash for height ${lastCheckpoint.height}, rolling back.`);
                    lastCheckpoint.hash = null;
                    lastCheckpoint.height--;
                    rollbackCount++;
                    hadReorg = true;
                } else {
                    console.log(`[WARN] Cannot rollback further than ${lastCheckpoint.height}.`);
                }
            }
            if (rollbackCount > 0 && lastCheckpoint.height > from) {
                console.log(`[WARN] Current checkpoint set to ${actualHash} ${lastCheckpoint.height} after rollback.`);
                await Info.updateBlockCheckpoint(lastCheckpoint.height, actualHash);
            } else if(lastCheckpoint.height <= from) {
                // Rolled back to (or below) the configured starting height; restart from scratch.
                return { height: from, hash: null, hadReorg: true };
            }
            if(maxRollback > 0 && rollbackCount > maxRollback) {
                throw Error("A large rollback occurred when trying to find actual block hash, this should not happen, shutting down");
            }
        }
        // Next, we should ensure our previous block hash stored in leveldb
        // matches the current tip's previous hash, otherwise we need to rollback again
        let prevBlockHash = (<BlockHeaderResult>await RpcClient.getBlockInfo({ hash: actualHash })).previousblockhash;
        let prevBlockHeight = lastCheckpoint.height - 1;
        console.log(`[INFO] Checking previous actual block hash: ${prevBlockHash} for ${prevBlockHeight}`);
        let storedPrevCheckpointHash = await Info.getCheckpointHash(prevBlockHeight);
        console.log(`[INFO] Previously stored hash: ${storedPrevCheckpointHash} at ${prevBlockHeight}`);
        if(storedPrevCheckpointHash) {
            maxRollback = 100;
            rollbackCount = 0;
            while (storedPrevCheckpointHash !== prevBlockHash && prevBlockHeight > from) {
                rollbackCount++;
                hadReorg = true;
                storedPrevCheckpointHash = await Info.getCheckpointHash(--prevBlockHeight);
                prevBlockHash = (<BlockHeaderResult>await RpcClient.getBlockInfo({ hash: prevBlockHash })).previousblockhash;
                console.log(`[WARN] Rolling back to stored previous height ${prevBlockHeight}`);
                console.log(`[WARN] Rollback - actual previous hash ${prevBlockHash}`);
                console.log(`[WARN] Rollback - stored previous hash ${storedPrevCheckpointHash}`);
                if(maxRollback > 0 && rollbackCount > maxRollback) {
                    throw Error("A large rollback occurred when rolling back due to prev hash mismatch, this should not happen, shutting down");
                }
                actualHash = prevBlockHash;
                lastCheckpoint.height = prevBlockHeight;
            }
            if(rollbackCount > 0 && lastCheckpoint.height > from) {
                console.log(`[WARN] Current checkpoint at ${actualHash} ${lastCheckpoint.height}`);
                await Info.updateBlockCheckpoint(lastCheckpoint.height, actualHash);
            } else if(lastCheckpoint.height <= from) {
                return { height: from, hash: null, hadReorg: true }
            }
        }
        // return current checkpoint - if a rollback occured the returned value will be for the matching previous block hash
        return { hash: actualHash, height: lastCheckpoint.height, hadReorg };
    }

    /** Startup entry point: runs a full block sync from the checkpoint. */
    async processBlocksForTNA() {
        await Bit.sync(this, 'block');
    }

    /** Startup entry point: snapshots the current SLP mempool into the db. */
    async processCurrentMempoolForTNA() {
        let items = await this.requestSlpMempool();
        await this.db.unconfirmedSync(items);
    }
}