From 6dc5054bbfdca74d400d0a750f1d073fca1a7621 Mon Sep 17 00:00:00 2001 From: Nodari Chkuaselidze Date: Thu, 23 Feb 2023 15:36:50 +0400 Subject: [PATCH] chain: use nurkel instead of urkel. pkg: update packages. --- lib/blockchain/chain.js | 13 ++-- lib/blockchain/chaindb.js | 76 ++++++++++++--------- lib/mining/miner.js | 2 +- lib/net/packets.js | 9 ++- lib/net/pool.js | 3 +- lib/node/rpc.js | 3 +- package-lock.json | 66 ++++++++++++------ package.json | 2 +- test/auction-reorg-test.js | 6 +- test/auction-test.js | 4 +- test/chain-migration-test.js | 3 +- test/chain-reset-reorg-test.js | 20 +++--- test/chain-tree-compaction-test.js | 104 ++++++++++++++++++----------- test/disable-goosig-test.js | 2 +- test/mempool-test.js | 6 +- test/net-spv-test.js | 14 ++-- test/net-test.js | 2 +- test/node-critical-error-test.js | 2 +- test/node-http-test.js | 2 +- 19 files changed, 202 insertions(+), 137 deletions(-) diff --git a/lib/blockchain/chain.js b/lib/blockchain/chain.js index 2b50df8985..7f9657b122 100644 --- a/lib/blockchain/chain.js +++ b/lib/blockchain/chain.js @@ -173,7 +173,7 @@ class Chain extends AsyncEmitter { // tree interval. We might also need to recover from a // failed compactTree() operation. Either way, there might have been // new blocks added to the chain since then. - const currentRoot = this.db.treeRoot(); + const currentRoot = await this.db.treeRoot(); // We store commit height for the tree in the tree state. // commitHeight is the height of the block that committed tree root. 
@@ -211,7 +211,8 @@ class Chain extends AsyncEmitter { await this.db.saveNames(view, entry, false); } - this.logger.info('Synchronized Tree Root: %x.', this.db.txn.rootHash()); + this.logger.info('Synchronized Tree Root: %x.', + await this.db.txn.txRootHash()); } /** @@ -737,7 +738,7 @@ class Chain extends AsyncEmitter { const view = new CoinView(); const height = prev.height + 1; - assert(block.treeRoot.equals(this.db.treeRoot())); + assert(block.treeRoot.equals(await this.db.treeRoot())); for (let i = 0; i < block.txs.length; i++) { const tx = block.txs[i]; @@ -786,7 +787,7 @@ class Chain extends AsyncEmitter { let reward = 0; // Check the name tree root. - if (!block.treeRoot.equals(this.db.treeRoot())) { + if (!block.treeRoot.equals(await this.db.treeRoot())) { throw new VerifyError(block, 'invalid', 'bad-tree-root', @@ -1815,7 +1816,7 @@ class Chain extends AsyncEmitter { this.emit('reconnect', entry, block); if ((entry.height % this.network.names.treeInterval) === 0) - this.emit('tree commit', this.db.tree.rootHash(), entry, block); + this.emit('tree commit', await this.db.tree.treeRootHash(), entry, block); return this.emitAsync('connect', entry, block, view); } @@ -1914,7 +1915,7 @@ class Chain extends AsyncEmitter { this.emit('block', block, entry); if ((entry.height % this.network.names.treeInterval) === 0) - this.emit('tree commit', this.db.tree.rootHash(), entry, block); + this.emit('tree commit', await this.db.tree.treeRootHash(), entry, block); return this.emitAsync('connect', entry, block, view); } diff --git a/lib/blockchain/chaindb.js b/lib/blockchain/chaindb.js index 773ce37ecf..f7fa8bc76b 100644 --- a/lib/blockchain/chaindb.js +++ b/lib/blockchain/chaindb.js @@ -9,9 +9,9 @@ const assert = require('bsert'); const bdb = require('bdb'); const bio = require('bufio'); -const blake2b = require('bcrypto/lib/blake2b'); const LRU = require('blru'); -const {Tree} = require('urkel'); +const nurkel = require('nurkel'); +const {Tree} = nurkel; const 
{BufferMap, BufferSet} = require('buffer-map'); const ChainMigrator = require('./migrations'); const Amount = require('../ui/amount'); @@ -53,14 +53,11 @@ class ChainDB { this.db = bdb.create(this.options); this.name = 'chain'; this.version = 3; - this.tree = new Tree({ - hash: blake2b, - bits: 256, + this.tree = nurkel.create({ prefix: this.options.treePrefix, - cacheOnly: true, - initCacheSize: -1 + memory: this.options.memory || !this.options.treePrefix }); - this.txn = this.tree.txn(); + this.txn = null; this.treeState = new TreeState(); this.stateCache = new StateCache(this.network); this.state = new ChainState(); @@ -84,6 +81,9 @@ class ChainDB { await this.db.open(); await this.tree.open(); + this.txn = this.tree.vtxn(); + await this.txn.open(); + const migrator = new ChainMigrator({ ...this.options, chainDB: this, @@ -137,8 +137,6 @@ class ChainDB { this.logger.info('ChainDB successfully loaded.'); } - this.txn = this.tree.txn(); - this.logger.info( 'Chain State: hash=%x tx=%d coin=%d value=%s burned=%s.', this.state.tip, @@ -147,7 +145,7 @@ class ChainDB { Amount.coin(this.state.value), Amount.coin(this.state.burned)); - this.logger.info('Tree Root: %x.', this.tree.rootHash()); + this.logger.info('Tree Root: %x.', await this.tree.treeRootHash()); } /** @@ -224,8 +222,10 @@ class ChainDB { */ async close() { + await this.txn.close(); + this.txn = null; + await this.tree.close(); - this.txn = this.tree.txn(); return this.db.close(); } @@ -630,8 +630,9 @@ class ChainDB { if (this.options.spv) throw new Error('Cannot lookup in SPV mode.'); - const tree = this.tree.snapshot(root); - return tree.get(key); + const snap = this.tree.snapshot(root); + await snap.open(); + return snap.get(key); } /** @@ -645,17 +646,30 @@ class ChainDB { if (this.options.spv) throw new Error('Cannot prove in SPV mode.'); - const tree = this.tree.snapshot(root); - return tree.prove(key); + const snap = this.tree.snapshot(root); + await snap.open(); + return snap.prove(key); + } + + 
/** + * Verify Proof + * @param {Buffer} root + * @param {Buffer} key + * @param {Proof} proof + * @returns {Promise} + */ + + async verify(root, key, proof) { + return Tree.verify(root, key, proof); } /** * Get the current name tree root. - * @returns {Hash} + * @returns {Promise} */ treeRoot() { - return this.tree.rootHash(); + return this.tree.treeRootHash(); } /** @@ -990,6 +1004,8 @@ class ChainDB { */ async compactTree(entry) { + assert(!this.options.memory && this.options.treePrefix, + 'Can not compact in memory.'); // Before doing anything to the tree, // save the target tree root hash to chain database. // If the tree data gets out of sync or corrupted @@ -1006,30 +1022,24 @@ class ChainDB { const tmpDir = this.options.treePrefix + '~'; - const tmpTree = new Tree({ - hash: blake2b, - bits: 256, - prefix: tmpDir - }); - // Make sure to remove the tmp directory first. // There should not be directory, unless it was // stopped in the middle of compaction. // Otherwise compacted tree would add on top // of the previsouly compacted db. + const tmpTree = nurkel.create({ + prefix: tmpDir + }); + await tmpTree.open(); - const tmpStore = tmpTree.store; await tmpTree.close(); - await tmpStore.destroy(); - - // Rewind tree to historical commitment - await this.tree.inject(entry.treeRoot); + await Tree.destroy(tmpDir); - // Delete historical data - await this.tree.compact(tmpDir); + await this.tree.compact(tmpDir, entry.treeRoot); // Reset in-memory tree delta - this.txn = this.tree.txn(); + this.txn = this.tree.vtxn(); + await this.txn.open(); // Mark tree compaction complete this.start(); @@ -2081,7 +2091,7 @@ class ChainDB { // Chain will need to recover current txn // from treeState.commitHeight + 1 (including). 
this.put(layout.s.encode(), this.pendingTreeState.commit( - this.tree.rootHash(), + await this.tree.treeRootHash(), entry.height )); } diff --git a/lib/mining/miner.js b/lib/mining/miner.js index daa067eeb1..7704a4654f 100644 --- a/lib/mining/miner.js +++ b/lib/mining/miner.js @@ -135,7 +135,7 @@ class Miner extends EventEmitter { const state = await this.chain.getDeployments(time, tip); const target = await this.chain.getTarget(time, tip); - const root = this.chain.db.treeRoot(); + const root = await this.chain.db.treeRoot(); const attempt = new BlockTemplate({ prevBlock: tip.hash, diff --git a/lib/net/packets.js b/lib/net/packets.js index cb3f321f21..dfb86edc9e 100644 --- a/lib/net/packets.js +++ b/lib/net/packets.js @@ -12,8 +12,7 @@ const assert = require('bsert'); const bio = require('bufio'); -const blake2b = require('bcrypto/lib/blake2b'); -const UrkelProof = require('urkel').Proof; +const UrkelProof = require('nurkel').Proof; const {BloomFilter} = require('bfilter'); const common = require('./common'); const util = require('../utils/util'); @@ -1669,7 +1668,7 @@ class ProofPacket extends Packet { getSize() { let size = 64; - size += this.proof.getSize(blake2b, 256); + size += this.proof.getSize(); return size; } @@ -1681,7 +1680,7 @@ class ProofPacket extends Packet { write(bw) { bw.writeHash(this.root); bw.writeBytes(this.key); - this.proof.writeBW(bw, blake2b, 256); + this.proof.writeBW(bw); return this; } @@ -1694,7 +1693,7 @@ class ProofPacket extends Packet { read(br) { this.root = br.readHash(); this.key = br.readBytes(32); - this.proof = UrkelProof.readBR(br, blake2b, 256); + this.proof = UrkelProof.readBR(br); return this; } } diff --git a/lib/net/pool.js b/lib/net/pool.js index d12d6f42cb..bce061a918 100644 --- a/lib/net/pool.js +++ b/lib/net/pool.js @@ -16,7 +16,6 @@ const socks = require('bsocks'); const List = require('blst'); const base32 = require('bcrypto/lib/encoding/base32'); const {BufferMap, BufferSet} = require('buffer-map'); -const 
blake2b = require('bcrypto/lib/blake2b'); const {BloomFilter, RollingFilter} = require('bfilter'); const rng = require('bcrypto/lib/random'); const secp256k1 = require('bcrypto/lib/secp256k1'); @@ -3284,7 +3283,7 @@ class Pool extends EventEmitter { return; } - const [code, data] = proof.verify(root, key, blake2b, 256); + const [code, data] = await this.chain.db.verify(root, key, proof); if (code !== 0) { this.logger.warning( diff --git a/lib/node/rpc.js b/lib/node/rpc.js index d7f1bd1153..3741222355 100644 --- a/lib/node/rpc.js +++ b/lib/node/rpc.js @@ -2404,8 +2404,7 @@ class RPC extends RPCBase { const iter = txn.iterator(); - while (await iter.next()) { - const {key, value} = iter; + for await (const [key, value] of iter) { const ns = NameState.decode(value); ns.nameHash = key; diff --git a/package-lock.json b/package-lock.json index 541f4a222c..c874d1b054 100644 --- a/package-lock.json +++ b/package-lock.json @@ -35,7 +35,7 @@ "goosig": "~0.10.0", "hs-client": "~0.0.13", "n64": "~0.2.10", - "urkel": "~1.0.2" + "nurkel": "~0.0.10" }, "bin": { "hs-seeder": "bin/hs-seeder", @@ -384,9 +384,9 @@ } }, "node_modules/bval": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/bval/-/bval-0.1.6.tgz", - "integrity": "sha512-jxNH9gSx7g749hQtS+nTxXYz/bLxwr4We1RHFkCYalNYcj12RfbW6qYWsKu0RYiKAdFcbNoZRHmWrIuXIyhiQQ==", + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/bval/-/bval-0.1.7.tgz", + "integrity": "sha512-Ohv+MV7C0xAt5DJ4u681EVCArZ+ybEpz8yPjAozBEZYmR/rp9AX9QJB9PJ0aVlzEodjr3PIKTQrZfaONP8XR9w==", "dependencies": { "bsert": "~0.0.10" }, @@ -395,9 +395,9 @@ } }, "node_modules/bweb": { - "version": "0.1.11", - "resolved": "https://registry.npmjs.org/bweb/-/bweb-0.1.11.tgz", - "integrity": "sha512-zi9FtNpPD+Mv9UAZFRzV3jnUcEDPJTeUS8JDvsR9Nlq4fwc93SBJUui0PrC1U1GO8zzopoXtoOZ5Do9L46Rv2Q==", + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/bweb/-/bweb-0.1.12.tgz", + "integrity": 
"sha512-ctj1FnoTF+7/dRiRYVVwY/6lV/NYQ5Gz8HS0LsyUxae7fMJGKx2geHHIcIXeZ5DolS49Xq9e4rh29aG/vAJrWw==", "dependencies": { "bsert": "~0.0.10", "bsock": "~0.1.8" @@ -471,6 +471,22 @@ "node": ">=2.0.0" } }, + "node_modules/nurkel": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/nurkel/-/nurkel-0.0.10.tgz", + "integrity": "sha512-SEtMmnsMaxm5rkai5Wo27sA5sKeMaUHGEWgoXbHZqePkPeL6Kv6Bqa1g2J1hVd1tu/y0oL46MWD3bQB7G0W+Xg==", + "hasInstallScript": true, + "dependencies": { + "bfile": "^0.2.2", + "bsert": "^0.0.10", + "buffer-map": "^0.0.7", + "loady": "^0.0.5", + "urkel": "^1.0.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/unbound": { "version": "0.4.3", "resolved": "https://registry.npmjs.org/unbound/-/unbound-0.4.3.tgz", @@ -485,9 +501,9 @@ } }, "node_modules/urkel": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/urkel/-/urkel-1.0.2.tgz", - "integrity": "sha512-Y5UXbgBr6pczrD08N0SYJkWjtdtTTpmZsOvuftdrEHLnTjuxwSNjKsXYLQkICTptvnHAJ2OjI6XdAxtYTyOHew==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/urkel/-/urkel-1.0.3.tgz", + "integrity": "sha512-L2M46WWSaz1LpyUYFgnQg7WSOWtNcRx3uH+4GwHK1jbmYj6phLuIwirTVMlhfcZ0o/CWn5Y04UWLhmlvijZiDg==", "dependencies": { "bfile": "~0.2.1", "bmutex": "~0.1.6", @@ -728,17 +744,17 @@ } }, "bval": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/bval/-/bval-0.1.6.tgz", - "integrity": "sha512-jxNH9gSx7g749hQtS+nTxXYz/bLxwr4We1RHFkCYalNYcj12RfbW6qYWsKu0RYiKAdFcbNoZRHmWrIuXIyhiQQ==", + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/bval/-/bval-0.1.7.tgz", + "integrity": "sha512-Ohv+MV7C0xAt5DJ4u681EVCArZ+ybEpz8yPjAozBEZYmR/rp9AX9QJB9PJ0aVlzEodjr3PIKTQrZfaONP8XR9w==", "requires": { "bsert": "~0.0.10" } }, "bweb": { - "version": "0.1.11", - "resolved": "https://registry.npmjs.org/bweb/-/bweb-0.1.11.tgz", - "integrity": "sha512-zi9FtNpPD+Mv9UAZFRzV3jnUcEDPJTeUS8JDvsR9Nlq4fwc93SBJUui0PrC1U1GO8zzopoXtoOZ5Do9L46Rv2Q==", + "version": "0.1.12", + 
"resolved": "https://registry.npmjs.org/bweb/-/bweb-0.1.12.tgz", + "integrity": "sha512-ctj1FnoTF+7/dRiRYVVwY/6lV/NYQ5Gz8HS0LsyUxae7fMJGKx2geHHIcIXeZ5DolS49Xq9e4rh29aG/vAJrWw==", "requires": { "bsert": "~0.0.10", "bsock": "~0.1.8" @@ -783,6 +799,18 @@ "resolved": "https://registry.npmjs.org/n64/-/n64-0.2.10.tgz", "integrity": "sha512-uH9geV4+roR1tohsrrqSOLCJ9Mh1iFcDI+9vUuydDlDxUS1UCAWUfuGb06p3dj3flzywquJNrGsQ7lHP8+4RVQ==" }, + "nurkel": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/nurkel/-/nurkel-0.0.10.tgz", + "integrity": "sha512-SEtMmnsMaxm5rkai5Wo27sA5sKeMaUHGEWgoXbHZqePkPeL6Kv6Bqa1g2J1hVd1tu/y0oL46MWD3bQB7G0W+Xg==", + "requires": { + "bfile": "^0.2.2", + "bsert": "^0.0.10", + "buffer-map": "^0.0.7", + "loady": "^0.0.5", + "urkel": "^1.0.2" + } + }, "unbound": { "version": "0.4.3", "resolved": "https://registry.npmjs.org/unbound/-/unbound-0.4.3.tgz", @@ -793,9 +821,9 @@ } }, "urkel": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/urkel/-/urkel-1.0.2.tgz", - "integrity": "sha512-Y5UXbgBr6pczrD08N0SYJkWjtdtTTpmZsOvuftdrEHLnTjuxwSNjKsXYLQkICTptvnHAJ2OjI6XdAxtYTyOHew==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/urkel/-/urkel-1.0.3.tgz", + "integrity": "sha512-L2M46WWSaz1LpyUYFgnQg7WSOWtNcRx3uH+4GwHK1jbmYj6phLuIwirTVMlhfcZ0o/CWn5Y04UWLhmlvijZiDg==", "requires": { "bfile": "~0.2.1", "bmutex": "~0.1.6", diff --git a/package.json b/package.json index 7656b074ee..fa76014b5d 100644 --- a/package.json +++ b/package.json @@ -46,7 +46,7 @@ "goosig": "~0.10.0", "hs-client": "~0.0.13", "n64": "~0.2.10", - "urkel": "~1.0.2" + "nurkel": "~0.0.10" }, "devDependencies": { "bmocha": "^2.1.8" diff --git a/test/auction-reorg-test.js b/test/auction-reorg-test.js index 5969c1d6b1..132209545f 100644 --- a/test/auction-reorg-test.js +++ b/test/auction-reorg-test.js @@ -265,7 +265,7 @@ describe('Auction Reorg', function() { assert((chain.height % treeInterval) === 0); snapshot = { - treeRoot: chain.db.txn.rootHash(), + 
treeRoot: await chain.db.txn.txRootHash(), ns: await chain.db.getNameStateByName(NAME1) }; }); @@ -395,10 +395,10 @@ describe('Auction Reorg', function() { it('should have the same DB root', async () => { assert((chain.height % network.names.treeInterval) !== 0); - const root = chain.db.txn.rootHash(); + const root = await chain.db.txn.txRootHash(); await chain.close(); await chain.open(); - assert.bufferEqual(root, chain.db.txn.rootHash()); + assert.bufferEqual(root, await chain.db.txn.txRootHash()); }); it('should cleanup', async () => { diff --git a/test/auction-test.js b/test/auction-test.js index a224d9ad31..a2984388aa 100644 --- a/test/auction-test.js +++ b/test/auction-test.js @@ -379,10 +379,10 @@ describe('Auction', function() { it('should have the same DB root', async () => { assert((chain.height % network.names.treeInterval) !== 0); - const root = chain.db.txn.rootHash(); + const root = await chain.db.txn.txRootHash(); await chain.close(); await chain.open(); - assert.bufferEqual(root, chain.db.txn.rootHash()); + assert.bufferEqual(root, await chain.db.txn.txRootHash()); }); it('should not have transfer stats in JSON yet', async () => { diff --git a/test/chain-migration-test.js b/test/chain-migration-test.js index 5183e951dd..e18c989ad1 100644 --- a/test/chain-migration-test.js +++ b/test/chain-migration-test.js @@ -4,6 +4,7 @@ const assert = require('bsert'); const fs = require('bfile'); const {encoding} = require('bufio'); const {ZERO_HASH} = require('../lib/protocol/consensus'); +const {statusCodes} = require('nurkel'); const Network = require('../lib/protocol/network'); const WorkerPool = require('../lib/workers/workerpool'); const Miner = require('../lib/mining/miner'); @@ -971,7 +972,7 @@ describe('Chain Migrations', function() { // Now our error should be incorrect tree (after migration) assert(error, 'Chain must throw an error.'); - assert.strictEqual(error.message, `Missing node: ${root.toString('hex')}.`); + 
assert.strictEqual(statusCodes[error.code], statusCodes.URKEL_ENOTFOUND); const version = getVersion(await ldb.get(layout.V.encode()), 'chain'); assert.strictEqual(version, 3); diff --git a/test/chain-reset-reorg-test.js b/test/chain-reset-reorg-test.js index 0dfb93e064..a1a5143355 100644 --- a/test/chain-reset-reorg-test.js +++ b/test/chain-reset-reorg-test.js @@ -133,7 +133,7 @@ describe('Chain reorg/reset test', function() { // let's mine 2 on the best first. const names1 = await mineBlocksOpens(mainMiner, 2); - assert.bufferEqual(chain.db.treeRoot(), root); + assert.bufferEqual(await chain.db.treeRoot(), root); for (const name of [...names0, ...names1]) { assert.strictEqual(await chainTreeHas(chain, name), false); @@ -145,7 +145,7 @@ describe('Chain reorg/reset test', function() { await syncChain(altChain, chain, tipHeight); tipHeight += 3; - assert.bufferEqual(chain.db.treeRoot(), root); + assert.bufferEqual(await chain.db.treeRoot(), root); for (const name of [...names0, ...names2]) { assert.strictEqual(await chainTreeHas(chain, name), false); @@ -176,18 +176,18 @@ describe('Chain reorg/reset test', function() { assert.strictEqual(await chainTxnHas(chain, name), false); } - assert.notBufferEqual(chain.db.treeRoot(), root); + assert.notBufferEqual(await chain.db.treeRoot(), root); // Now txn is empty and its root should be the same as the tree root. 
- assert.bufferEqual(chain.db.treeRoot(), chain.db.txn.rootHash()); - assert.bufferEqual(altChain.db.treeRoot(), altChain.db.txn.rootHash()); - assert.bufferEqual(altChain.db.treeRoot(), chain.db.treeRoot()); + assert.bufferEqual(await chain.db.treeRoot(), await chain.db.txn.txRootHash()); + assert.bufferEqual(await altChain.db.treeRoot(), await altChain.db.txn.txRootHash()); + assert.bufferEqual(await altChain.db.treeRoot(), await chain.db.treeRoot()); }); it('should reorg 3 blocks and check tree (at interval)', async () => { assert.strictEqual(chain.tip.height, tipHeight); assert.strictEqual(altChain.tip.height, tipHeight); - const root = chain.db.treeRoot(); + const root = await chain.db.treeRoot(); // move forward to 48 const names0 = await mineBlocksOpens(mainMiner, 3); @@ -199,8 +199,8 @@ describe('Chain reorg/reset test', function() { assert.strictEqual(await chainTxnHas(chain, name), true); } - assert.notBufferEqual(chain.db.txn.rootHash(), root); - assert.bufferEqual(chain.db.treeRoot(), root); + assert.notBufferEqual(await chain.db.txn.txRootHash(), root); + assert.bufferEqual(await chain.db.treeRoot(), root); assert.strictEqual(chain.tip.height, tipHeight); // mine 3 blocks. 
@@ -324,7 +324,7 @@ describe('Chain reorg/reset test', function() { await syncChain(chain, altChain, tipHeight); tipHeight += 2; - assert.bufferEqual(chain.db.treeRoot(), root); + assert.bufferEqual(await chain.db.treeRoot(), root); for (const name of [...names0, ...resetNames]) { assert.strictEqual(await chainTreeHas(chain, name), false); diff --git a/test/chain-tree-compaction-test.js b/test/chain-tree-compaction-test.js index 84ac74bb17..9bea42d656 100644 --- a/test/chain-tree-compaction-test.js +++ b/test/chain-tree-compaction-test.js @@ -4,6 +4,7 @@ const os = require('os'); const path = require('path'); const fs = require('bfile'); const assert = require('bsert'); +const {statusCodesByVal, statusCodes} = require('nurkel'); const consensus = require('../lib/protocol/consensus'); const Network = require('../lib/protocol/network'); const Miner = require('../lib/mining/miner'); @@ -50,7 +51,6 @@ describe('Tree Compacting', function() { `hsd-tree-compacting-test-${Date.now()}` ); const treePath = path.join(prefix, 'tree'); - const treePart1 = path.join(prefix, 'tree', '0000000001'); // This is the chain we are testing, // we are going to compact its tree @@ -123,11 +123,21 @@ describe('Tree Compacting', function() { const checkTree = async (compacted = false) => { for (const [index, hash] of treeRoots.entries()) { if (compacted && index < (treeRoots.length - 8)) { - // Old root node has been deleted, tree state can not be restored. - await assert.rejects( - chain.db.tree.inject(hash), - {message: `Missing node: ${hash.toString('hex')}.`} + let err; + + try { + // Old root node has been deleted, tree state can not be restored. 
+ await chain.db.tree.inject(hash); + } catch (e) { + err = e; + } + + assert(err, 'tree inject must throw.'); + assert.strictEqual( + err.code, + statusCodesByVal[statusCodes.URKEL_ENOTFOUND] ); + continue; } @@ -219,9 +229,9 @@ describe('Tree Compacting', function() { }); it('should compact tree', async () => { - const before = await fs.stat(treePart1); + const before = await chain.db.tree.stat(); await chain.compactTree(); - const after = await fs.stat(treePart1); + const after = await chain.db.tree.stat(); // Urkel Tree should be smaller now. // Urkel Tree files are padded to ensure that Meta nodes are written @@ -274,9 +284,9 @@ describe('Tree Compacting', function() { it('should compact tree a second time with no new data', async () => { // If user executes rpc compacttree repeatedly, // it shouldn't break anything. - const before = await fs.stat(treePart1); + const before = await chain.db.tree.stat(); await chain.compactTree(); - const after = await fs.stat(treePart1); + const after = await chain.db.tree.stat(); // Should be no change assert.strictEqual(before.size, after.size); @@ -297,14 +307,14 @@ describe('Tree Compacting', function() { await mineBlocks(treeInterval, mempool); // Tree and txn are synced due to tree commitment. - assert.bufferEqual(chain.db.tree.rootHash(), chain.db.txn.rootHash()); + assert.bufferEqual(await chain.db.tree.treeRootHash(), await chain.db.txn.txRootHash()); // Increment counter and confirm, but do not advance to tree interval. 
send(await wallet.sendUpdate(name, Buffer.from([++counter])), mempool); await mineBlocks(1, mempool); // The txn is updated, but the tree is still in last-committed state - assert.notBufferEqual(chain.db.tree.rootHash(), chain.db.txn.rootHash()); + assert.notBufferEqual(await chain.db.tree.treeRootHash(), await chain.db.txn.txRootHash()); raw = await chain.db.txn.get(nameHash); ns = NameState.decode(raw); assert.bufferEqual(ns.data, Buffer.from([counter])); @@ -313,19 +323,19 @@ describe('Tree Compacting', function() { assert.bufferEqual(ns.data, Buffer.from([counter - 1])); // Save - const txnRootBefore = chain.db.txn.rootHash(); - const treeRootBefore = chain.db.tree.rootHash(); + const txnRootBefore = await chain.db.txn.txRootHash(); + const treeRootBefore = await chain.db.tree.treeRootHash(); // Compact - const before = await fs.stat(treePart1); + const before = await chain.db.tree.stat(); await chain.compactTree(); - const after = await fs.stat(treePart1); + const after = await chain.db.tree.stat(); assert(before.size > after.size); // Check - assert.bufferEqual(txnRootBefore, chain.db.txn.rootHash()); - assert.bufferEqual(treeRootBefore, chain.db.tree.rootHash()); - assert.notBufferEqual(chain.db.tree.rootHash(), chain.db.txn.rootHash()); + assert.bufferEqual(txnRootBefore, await chain.db.txn.txRootHash()); + assert.bufferEqual(treeRootBefore, await chain.db.tree.treeRootHash()); + assert.notBufferEqual(await chain.db.tree.treeRootHash(), await chain.db.txn.txRootHash()); raw = await chain.db.txn.get(nameHash); ns = NameState.decode(raw); assert.bufferEqual(ns.data, Buffer.from([counter])); @@ -346,14 +356,14 @@ describe('Tree Compacting', function() { await mineBlocks(treeInterval, mempool); } - const before = await fs.stat(treePart1); + const before = await chain.db.tree.stat(); // Rewind the tree 6 intervals and compact, but do not sync to tip yet. 
const entry = await chain.getEntry(chain.height - 6 * treeInterval); await chain.db.compactTree(entry); // Confirm tree state has been rewound - assert.notBufferEqual(chain.db.tree.rootHash(), chain.tip.treeRoot); + assert.notBufferEqual(await chain.db.tree.treeRootHash(), chain.tip.treeRoot); // Oops, we abort before calling chain.syncTree() await miner.close(); @@ -368,11 +378,11 @@ describe('Tree Compacting', function() { await miner.open(); // Tree was compacted - const after = await fs.stat(treePart1); + const after = await chain.db.tree.stat(); assert(before.size > after.size); // Tree was re-synced automatically to chain tip on restart - assert.bufferEqual(chain.db.tree.rootHash(), chain.tip.treeRoot); + assert.bufferEqual(await chain.db.tree.treeRootHash(), chain.tip.treeRoot); raw = await chain.db.tree.get(nameHash); ns = NameState.decode(raw); assert.bufferEqual(ns.data, Buffer.from([counter + 21])); @@ -398,7 +408,7 @@ describe('Tree Compacting', function() { const CHAIN_DB_COMMIT = chain.db.commit; // Current tree root before crash - const treeRoot = chain.db.treeRoot(); + const treeRoot = await chain.db.treeRoot(); // Implement bug where node crashes before database batch is written. // When the next block is connected, it should successfully write @@ -406,7 +416,7 @@ describe('Tree Compacting', function() { // or levelDB indexes. 
chain.db.commit = async () => { // Tree root has been updated inside Urkel - const newRoot1 = chain.db.treeRoot(); + const newRoot1 = await chain.db.treeRoot(); assert(!treeRoot.equals(newRoot1)); // Reset batch, otherwise assert(!this.current) fails @@ -418,7 +428,15 @@ describe('Tree Compacting', function() { // Update name and attempt to confirm send(update, mempool); // Will "crash" node before completing operation - await mineBlocks(1, mempool); + let err; + try { + await mineBlocks(1, mempool); + } catch (e) { + err = e; + } + + assert(err, 'mineBlock must fail.'); + assert.strictEqual(err.message, 'Database is not open.'); assert(!chain.opened); // Restore proper batch-write function @@ -428,7 +446,7 @@ describe('Tree Compacting', function() { await chain.open(); // Tree root has been restored from pre-crash state - const newRoot2 = chain.db.treeRoot(); + const newRoot2 = await chain.db.treeRoot(); assert(treeRoot.equals(newRoot2)); // Try that update again with healthy chainDB @@ -437,11 +455,11 @@ describe('Tree Compacting', function() { // Tree has been updated but tree root won't be committed // to a block header until the next block. 
- assert(!chain.db.tree.rootHash().equals(chain.tip.treeRoot)); + assert(!(await chain.db.tree.treeRootHash()).equals(chain.tip.treeRoot)); await mineBlocks(1); // Everything is in order - assert.bufferEqual(chain.db.tree.rootHash(), chain.tip.treeRoot); + assert.bufferEqual(await chain.db.tree.treeRootHash(), chain.tip.treeRoot); raw = await chain.db.tree.get(nameHash); ns = NameState.decode(raw); assert.bufferEqual(ns.data, Buffer.from([counter + 1])); @@ -469,9 +487,9 @@ describe('Tree Compacting', function() { await checkTree(true); - const before = await fs.stat(treePart1); + const before = await chain.db.tree.stat(); await chain.reconstructTree(); - const after = await fs.stat(treePart1); + const after = await chain.db.tree.stat(); assert(before.size < after.size); @@ -486,9 +504,9 @@ describe('Tree Compacting', function() { await checkTree(false); // let's compact again and reconstruct - const before = await fs.stat(treePart1); + const before = await chain.db.tree.stat(); await chain.compactTree(); - const after = await fs.stat(treePart1); + const after = await chain.db.tree.stat(); assert(before.size > after.size); @@ -511,9 +529,9 @@ describe('Tree Compacting', function() { this.skip(); const tmpPath = treePath + '~'; - const beforeRecovery = await fs.stat(treePart1); + const beforeRecovery = await chain.db.tree.stat(); await chain.reconstructTree(); - const afterRecovery = await fs.stat(treePart1); + const afterRecovery = await chain.db.tree.stat(); assert(beforeRecovery.size < afterRecovery.size); await fs.copy(treePath, tmpPath); @@ -521,7 +539,7 @@ describe('Tree Compacting', function() { await fs.remove(path.join(tmpPath, 'lock')); await chain.compactTree(); - const afterCompaction = await fs.stat(treePart1); + const afterCompaction= await chain.db.tree.stat(); // If we don't remove existing TMP directory // afterCompaction would be bigger than afterRecovery. 
@@ -1104,9 +1122,19 @@ describe('Tree Compacting', function() { await chain.db.tree.inject(root); expected--; } else { - await assert.rejects( - chain.db.tree.inject(root), - {message: `Missing node: ${root.toString('hex')}.`} + let err; + + try { + // Old root node has been deleted, tree state can not be restored. + await chain.db.tree.inject(root); + } catch (e) { + err = e; + } + + assert(err, 'tree inject must throw.'); + assert.strictEqual( + err.code, + statusCodesByVal[statusCodes.URKEL_ENOTFOUND] ); } } diff --git a/test/disable-goosig-test.js b/test/disable-goosig-test.js index d09726a155..8d975990b0 100644 --- a/test/disable-goosig-test.js +++ b/test/disable-goosig-test.js @@ -142,7 +142,7 @@ describe('Disable GooSig', function() { const mtp = await node.chain.getMedianTime(tip); const time = Math.max(node.network.now(), mtp + 1); const target = await node.chain.getTarget(time, tip); - const root = node.chain.db.treeRoot(); + const root = await node.chain.db.treeRoot(); const template = new BlockTemplate({ prevBlock: tip.hash, diff --git a/test/mempool-test.js b/test/mempool-test.js index 9a4725c12f..e62620e038 100644 --- a/test/mempool-test.js +++ b/test/mempool-test.js @@ -647,7 +647,7 @@ describe('Mempool', function() { // Ensure mockblocks are unique (required for reorg testing) block.merkleRoot = block.createMerkleRoot(); block.witnessRoot = block.createWitnessRoot(); - block.treeRoot = chain.db.treeRoot(); + block.treeRoot = await chain.db.treeRoot(); return [block, view]; } @@ -1395,7 +1395,7 @@ describe('Mempool', function() { // Ensure mockblocks are unique (required for reorg testing) block.merkleRoot = block.createMerkleRoot(); block.witnessRoot = block.createWitnessRoot(); - block.treeRoot = chain.db.treeRoot(); + block.treeRoot = await chain.db.treeRoot(); return [block, view]; } @@ -1582,7 +1582,7 @@ describe('Mempool', function() { block.bits = await chain.getTarget(block.time, chain.tip); block.merkleRoot = block.createMerkleRoot(); 
block.witnessRoot = block.createWitnessRoot(); - block.treeRoot = chain.db.treeRoot(); + block.treeRoot = await chain.db.treeRoot(); const entry = await chain.add(block, VERIFY_BODY); // Crazy hack to spend coinbase diff --git a/test/net-spv-test.js b/test/net-spv-test.js index 2bc36d4aff..15503cf9f3 100644 --- a/test/net-spv-test.js +++ b/test/net-spv-test.js @@ -10,7 +10,7 @@ const rules = require('../lib/covenants/rules'); const NameState = require('../lib/covenants/namestate'); const {Resource} = require('../lib/dns/resource'); const {types: packetTypes} = require('../lib/net/packets'); -const {types: urkelTypes} = require('urkel').Proof; +const {proofTypes, statusCodes} = require('nurkel'); const {forValue} = require('./util/common'); const network = Network.get('regtest'); @@ -112,7 +112,7 @@ describe('SPV', function() { const ns = await spv.pool.resolve(Buffer.alloc(32, 0xab)); assert.strictEqual(ns, null); const proofType = await waiter; - assert.strictEqual(proofType, urkelTypes.TYPE_DEADEND); + assert.strictEqual(proofType, proofTypes.TYPE_DEADEND); }); it('should run auction and register name', async () => { @@ -146,7 +146,7 @@ describe('SPV', function() { const ns = await spv.pool.resolve(Buffer.alloc(32, 0xab)); assert.strictEqual(ns, null); const proofType = await waiter; - assert.strictEqual(proofType, urkelTypes.TYPE_COLLISION); + assert.strictEqual(proofType, proofTypes.TYPE_COLLISION); }); it('should get proof of existence with data', async () => { @@ -162,7 +162,7 @@ describe('SPV', function() { const res = Resource.decode(ns.data); assert.strictEqual(res.records[0].ns, 'one.'); const proofType = await waiter; - assert.strictEqual(proofType, urkelTypes.TYPE_EXISTS); + assert.strictEqual(proofType, proofTypes.TYPE_EXISTS); }); it('should update name data', async () => { @@ -192,7 +192,7 @@ describe('SPV', function() { const res = Resource.decode(ns.data); assert.strictEqual(res.records[0].ns, 'two.'); const proofType = await waiter; - 
assert.strictEqual(proofType, urkelTypes.TYPE_EXISTS); + assert.strictEqual(proofType, proofTypes.TYPE_EXISTS); }); it('should get historical data', async () => { @@ -216,7 +216,7 @@ describe('SPV', function() { const res = Resource.decode(ns.data); assert.strictEqual(res.records[0].ns, 'one.'); const proofType = await waiter1; - assert.strictEqual(proofType, urkelTypes.TYPE_EXISTS); + assert.strictEqual(proofType, proofTypes.TYPE_EXISTS); await spv.chain.removeInvalid(entry.hash); await forValue(spv.chain, 'height', full.chain.height); @@ -257,7 +257,7 @@ describe('SPV', function() { // This is the error thrown by the full node trying to serve the proof. assert(err); - assert.strictEqual(err.code, 'ERR_MISSING_NODE'); + assert.strictEqual(statusCodes[err.code], statusCodes.URKEL_ENOTFOUND); // :-( assert.strictEqual(spv.pool.peers.outbound, 0); diff --git a/test/net-test.js b/test/net-test.js index af74a9104a..4a6f4cb15a 100644 --- a/test/net-test.js +++ b/test/net-test.js @@ -20,7 +20,7 @@ const TX = require('../lib/primitives/tx'); const Claim = require('../lib/primitives/claim'); const Network = require('../lib/protocol/network'); const genesis = require('../lib/protocol/genesis'); -const UrkelProof = require('urkel').Proof; +const UrkelProof = require('nurkel').Proof; const blake2b = require('bcrypto/lib/blake2b'); const AirdropProof = require('../lib/primitives/airdropproof'); const util = require('../lib/utils/util'); diff --git a/test/node-critical-error-test.js b/test/node-critical-error-test.js index 7722d43278..4e20f6d516 100644 --- a/test/node-critical-error-test.js +++ b/test/node-critical-error-test.js @@ -129,7 +129,7 @@ describe('Node Critical Error', function() { }); await mineBlocks(node, 50); - node.chain.db.tree.store.commit = () => { + node.chain.db.txn.commit = () => { throw new Error('Disk full!'); }; await mineBlocks(node, 50); diff --git a/test/node-http-test.js b/test/node-http-test.js index b7335a8805..891aff33ed 100644 --- 
a/test/node-http-test.js +++ b/test/node-http-test.js @@ -307,7 +307,7 @@ describe('Node HTTP', function() { assert.equal(socketData.length, 1); const {root, block, entry} = socketData[0]; - assert.bufferEqual(node.chain.db.treeRoot(), root); + assert.bufferEqual(await node.chain.db.treeRoot(), root); const info = await nclient.getInfo(); assert.notEqual(pre.chain.tip, info.chain.tip);