Adapt to new async db functions

This commit is contained in:
Jacob Karlsson 2024-04-27 22:41:01 +02:00
parent 07c3e295b2
commit 530db5b96b
5 changed files with 262 additions and 211 deletions

View File

@@ -1,25 +0,0 @@
name: CI
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
test:
runs-on: ubuntu-latest
timeout-minutes: 10
strategy:
matrix:
node-version: [18.x, 20.x]
steps:
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
- run: npm install
- run: npm test

13
.woodpecker.yaml Normal file
View File

@@ -0,0 +1,13 @@
matrix:
NODE_VERSION:
- 18
- 20
steps:
test:
when:
event: [push]
image: node:${NODE_VERSION}
commands:
- npm install
- npm test

View File

@@ -1,5 +1,6 @@
const Obz = require('obz') const Obz = require('obz')
const MsgV4 = require('ppppp-db/msg-v4') const MsgV4 = require('ppppp-db/msg-v4')
const pull = require('pull-stream')
const PREFIX = 'set_v1__' const PREFIX = 'set_v1__'
@@ -191,23 +192,35 @@ function initSet(peer, config) {
/** /**
* @param {string} id * @param {string} id
* @param {string} subdomain * @param {string} subdomain
* @param {(err: Error | null, tangle?: any) => void} cb
*/ */
function readSet(id, subdomain) { function readSet(id, subdomain, cb) {
const domain = fromSubdomain(subdomain) const domain = fromSubdomain(subdomain)
const mootID = MsgV4.getMootID(id, domain) const mootID = MsgV4.getMootID(id, domain)
const tangle = peer.db.getTangle(mootID) // @ts-ignore
if (!tangle) return new Set() peer.db.getTangle(mootID, (err, tangle) => {
const msgIDs = tangle.topoSort() if (err) return cb(err)
const set = new Set() if (!tangle) return cb(null, new Set())
for (const msgID of msgIDs) { const msgIDs = tangle.topoSort()
const msg = peer.db.get(msgID) const set = new Set()
if (isValidSetMsg(msg)) { pull(
const { add, del } = msg.data pull.values(msgIDs),
for (const value of add) set.add(value) pull.asyncMap((msgID, cb) => {
for (const value of del) set.delete(value) peer.db.get(msgID, cb)
} }),
} pull.drain((msg) => {
return set if (isValidSetMsg(msg)) {
const { add, del } = msg.data
for (const value of add) set.add(value)
for (const value of del) set.delete(value)
}
}, (err) => {
// prettier-ignore
if (err) return cb(Error("Stream failed on readSet()", { cause: err }))
return cb(null, set)
})
)
})
} }
/** /**
@@ -278,25 +291,29 @@ function initSet(peer, config) {
/** /**
* @param {string} subdomain * @param {string} subdomain
* @param {CB<number>} cb
*/ */
function _squeezePotential(subdomain) { function _squeezePotential(subdomain, cb) {
// prettier-ignore // prettier-ignore
if (!loadedAccountID) throw new Error('Cannot squeeze potential before loading') if (!loadedAccountID) return cb(Error('Cannot squeeze potential before loading'))
// TODO: improve this so that the squeezePotential is the size of the // TODO: improve this so that the squeezePotential is the size of the
// tangle suffix built as a slice from the fieldRoots // tangle suffix built as a slice from the fieldRoots
const mootID = MsgV4.getMootID(loadedAccountID, fromSubdomain(subdomain)) const mootID = MsgV4.getMootID(loadedAccountID, fromSubdomain(subdomain))
const tangle = peer.db.getTangle(mootID) // @ts-ignore
if (!tangle) return 0 peer.db.getTangle(mootID, (err, tangle) => {
const maxDepth = tangle.maxDepth if (err) return cb(err)
const currentItemRoots = itemRoots.getAll(subdomain) if (!tangle) return cb(null, 0)
let minDepth = Infinity const maxDepth = tangle.maxDepth
for (const item in currentItemRoots) { const currentItemRoots = itemRoots.getAll(subdomain)
for (const msgID of currentItemRoots[item]) { let minDepth = Infinity
const depth = tangle.getDepth(msgID) for (const item in currentItemRoots) {
if (depth < minDepth) minDepth = depth for (const msgID of currentItemRoots[item]) {
const depth = tangle.getDepth(msgID)
if (depth < minDepth) minDepth = depth
}
} }
} return cb(null, maxDepth - minDepth)
return maxDepth - minDepth })
} }
//#endregion //#endregion
@@ -320,8 +337,8 @@ function initSet(peer, config) {
// microtask is needed to ensure that loadPromise is assigned BEFORE this // microtask is needed to ensure that loadPromise is assigned BEFORE this
// body is executed (which in turn does inversion of control when `cb` or // body is executed (which in turn does inversion of control when `cb` or
// `resolve` is called) // `resolve` is called)
queueMicrotask(() => { queueMicrotask(async () => {
for (const rec of peer.db.records()) { for await (const rec of peer.db.records()) {
if (!rec.msg) continue if (!rec.msg) continue
maybeLearnAboutSet(rec.id, rec.msg) maybeLearnAboutSet(rec.id, rec.msg)
} }
@@ -354,39 +371,43 @@ function initSet(peer, config) {
loaded(() => { loaded(() => {
// TODO this error needs to be put into the `cb`, not thrown // TODO this error needs to be put into the `cb`, not thrown
assert(!!loadedAccountID, 'Cannot add to Set before loading') assert(!!loadedAccountID, 'Cannot add to Set before loading')
const currentSet = readSet(loadedAccountID, subdomain) readSet(loadedAccountID, subdomain, (err, currentSet) => {
if (currentSet.has(value)) return cb(null, false) if (err) return cb(err)
const domain = fromSubdomain(subdomain)
// Populate supersedes if (currentSet.has(value)) return cb(null, false)
const supersedes = [] const domain = fromSubdomain(subdomain)
const toDeleteFromItemRoots = new Map()
const currentItemRoots = itemRoots.getAll(subdomain) // Populate supersedes
for (const item in currentItemRoots) { const supersedes = []
// If we are re-adding this item, OR if this item has been deleted, const toDeleteFromItemRoots = new Map()
// then we should update roots const currentItemRoots = itemRoots.getAll(subdomain)
if (item === value || !currentSet.has(item)) { for (const item in currentItemRoots) {
supersedes.push(...currentItemRoots[item]) // If we are re-adding this item, OR if this item has been deleted,
for (const msgID of currentItemRoots[item]) { // then we should update roots
toDeleteFromItemRoots.set(msgID, item) if (item === value || !currentSet.has(item)) {
supersedes.push(...currentItemRoots[item])
for (const msgID of currentItemRoots[item]) {
toDeleteFromItemRoots.set(msgID, item)
}
} }
} }
}
const data = { add: [value], del: [], supersedes } const data = { add: [value], del: [], supersedes }
peer.db.feed.publish( peer.db.feed.publish(
{ account: loadedAccountID, domain, data }, { account: loadedAccountID, domain, data },
(err, rec) => {
// prettier-ignore
if (err) return cb(new Error(`Failed to create msg when adding to Set "${subdomain}"`, { cause: err }))
for (const [msgID, item] of toDeleteFromItemRoots) {
itemRoots.del(subdomain, item, msgID)
}
// @ts-ignore // @ts-ignore
cb(null, true) (err, rec) => {
watch.set({ event: 'add', subdomain, value }) // prettier-ignore
} if (err) return cb(new Error(`Failed to create msg when adding to Set "${subdomain}"`, { cause: err }))
) for (const [msgID, item] of toDeleteFromItemRoots) {
itemRoots.del(subdomain, item, msgID)
}
// @ts-ignore
cb(null, true)
watch.set({ event: 'add', subdomain, value })
}
)
})
}) })
} }
@@ -404,99 +425,126 @@ function initSet(peer, config) {
loaded(() => { loaded(() => {
// TODO this error needs to be put into the `cb`, not thrown // TODO this error needs to be put into the `cb`, not thrown
assert(!!loadedAccountID, 'Cannot add to Set before loading') assert(!!loadedAccountID, 'Cannot add to Set before loading')
const currentSet = readSet(loadedAccountID, subdomain) readSet(loadedAccountID, subdomain, (err, currentSet) => {
if (!currentSet.has(value)) return cb(null, false) if (err) return cb(err)
const domain = fromSubdomain(subdomain)
// Populate supersedes if (!currentSet.has(value)) return cb(null, false)
const supersedes = [] const domain = fromSubdomain(subdomain)
const currentItemRoots = itemRoots.getAll(subdomain)
for (const item in currentItemRoots) { // Populate supersedes
if (item === value || !currentSet.has(item)) { const supersedes = []
supersedes.push(...currentItemRoots[item]) const currentItemRoots = itemRoots.getAll(subdomain)
for (const item in currentItemRoots) {
if (item === value || !currentSet.has(item)) {
supersedes.push(...currentItemRoots[item])
}
} }
}
const data = { add: [], del: [value], supersedes } const data = { add: [], del: [value], supersedes }
peer.db.feed.publish( peer.db.feed.publish(
{ account: loadedAccountID, domain, data }, { account: loadedAccountID, domain, data },
(err, rec) => {
// prettier-ignore
if (err) return cb(new Error(`Failed to create msg when deleting from Set "${subdomain}"`, { cause: err }))
// @ts-ignore // @ts-ignore
cb(null, true) (err, rec) => {
watch.set({ event: 'del', subdomain, value }) // prettier-ignore
} if (err) return cb(new Error(`Failed to create msg when deleting from Set "${subdomain}"`, { cause: err }))
) // @ts-ignore
cb(null, true)
watch.set({ event: 'del', subdomain, value })
}
)
})
}) })
} }
/** /**
* @param {string} subdomain * @param {string} subdomain
* @param {any} value * @param {any} value
* @param {string=} id * @param {string?} id
* @param {CB<boolean>} cb
*/ */
function has(subdomain, value, id) { function has(subdomain, value, id, cb) {
assert(!!loadedAccountID, 'Cannot call has() before loading') assert(!!loadedAccountID, 'Cannot call has() before loading')
const set = readSet(id ?? loadedAccountID, subdomain) readSet(id ?? loadedAccountID, subdomain, (err, set) => {
return set.has(value) if (err) return cb(err)
return cb(null, set.has(value))
})
} }
/** /**
* @param {string} subdomain * @param {string} subdomain
* @param {string=} id * @param {string?} id
* @param {CB<Array<any>>} cb
*/ */
function values(subdomain, id) { function values(subdomain, id, cb) {
assert(!!loadedAccountID, 'Cannot call values() before loading') assert(!!loadedAccountID, 'Cannot call values() before loading')
const set = readSet(id ?? loadedAccountID, subdomain) readSet(id ?? loadedAccountID, subdomain, (err, set) => {
return [...set] if (err) return cb(err)
return cb(null, [...set])
})
} }
/** /**
* @public * @public
* @param {string} tangleID * @param {string} tangleID
* @returns {number} * @param {CB<number>} cb
*/ */
function minGhostDepth(tangleID) { function minGhostDepth(tangleID, cb) {
return Math.max(0, minRequiredDepth(tangleID) - ghostSpan) minRequiredDepth(tangleID, (err, minDepth) => {
if (err) return cb(err)
return cb(null, Math.max(0, minDepth - ghostSpan))
})
} }
/** /**
* @public * @public
* @param {string} tangleID * @param {string} tangleID
* @returns {number} * @param {CB<number>} cb
*/ */
function minRequiredDepth(tangleID) { function minRequiredDepth(tangleID, cb) {
const tangle = peer.db.getTangle(tangleID) // @ts-ignore
peer.db.getTangle(tangleID, (err, tangle) => {
if (err) return cb(err)
// prettier-ignore // prettier-ignore
if (!tangle) return 0 if (!tangle) return cb(null, 0)
// prettier-ignore // prettier-ignore
if (!MsgV4.isMoot(tangle.root)) throw new Error(`Tangle "${tangleID}" is not a moot`) if (!MsgV4.isMoot(tangle.root)) return cb(Error(`Tangle "${tangleID}" is not a moot`))
const domain = tangle.root.metadata.domain const domain = tangle.root.metadata.domain
// prettier-ignore // prettier-ignore
if (!domain.startsWith(PREFIX)) throw new Error(`Tangle "${tangleID}" is not a Set moot`) if (!domain.startsWith(PREFIX)) return cb(Error(`Tangle "${tangleID}" is not a Set moot`))
// Discover item roots // Discover item roots
const itemRoots = new Set() const itemRoots = new Set()
const msgIDs = tangle.topoSort() pull(
for (const msgID of msgIDs) { pull.values(tangle.topoSort()),
const msg = peer.db.get(msgID) pull.asyncMap((msgID, cb) => {
if (!msg?.data) continue peer.db.getRecord(msgID, cb)
for (const supersededMsgID of msg.data.supersedes) { }),
itemRoots.delete(supersededMsgID) pull.drain((rec) => {
} const { msg, id: msgID } = rec
itemRoots.add(msgID) if (!msg?.data) return
} for (const supersededMsgID of msg.data.supersedes) {
itemRoots.delete(supersededMsgID)
}
itemRoots.add(msgID)
}, (err) => {
if (err) return cb(Error("Failed to iterate over tangle messages in minRequiredDepth()", { cause: err }))
// Get minimum depth of all item roots
let minDepth = Infinity
for (const msgID of itemRoots) {
const depth = tangle.getDepth(msgID)
if (depth < minDepth) minDepth = depth
}
return minDepth // Get minimum depth of all item roots
let minDepth = Infinity
for (const msgID of itemRoots) {
const depth = tangle.getDepth(msgID)
if (depth < minDepth) minDepth = depth
}
return cb(null, minDepth)
})
)
})
} }
/** /**
@@ -514,20 +562,27 @@ function initSet(peer, config) {
* @public * @public
* @param {MsgID} ghostableMsgID * @param {MsgID} ghostableMsgID
* @param {MsgID} tangleID * @param {MsgID} tangleID
* @param {(err: Error | null, ghostable?: boolean) => void} cb
*/ */
function isGhostable(ghostableMsgID, tangleID) { function isGhostable(ghostableMsgID, tangleID, cb) {
if (ghostableMsgID === tangleID) return false if (ghostableMsgID === tangleID) return cb(null, false)
const msg = peer.db.get(ghostableMsgID) // @ts-ignore
peer.db.get(ghostableMsgID, (err, msg) => {
if (err) return cb(err)
// prettier-ignore // prettier-ignore
if (!msg) throw new Error(`isGhostable() msgID "${ghostableMsgID}" does not exist in the database`) if (!msg) return cb(Error(`isGhostable() msgID "${ghostableMsgID}" does not exist in the database`))
const minItemRootDepth = minRequiredDepth(tangleID) minRequiredDepth(tangleID, (err, minItemRootDepth) => {
const minGhostDepth = minItemRootDepth - ghostSpan if (err) return cb(err)
const msgDepth = msg.metadata.tangles[tangleID].depth
if (minGhostDepth <= msgDepth && msgDepth < minItemRootDepth) return true const minGhostDepth = minItemRootDepth - ghostSpan
return false const msgDepth = msg.metadata.tangles[tangleID].depth
if (minGhostDepth <= msgDepth && msgDepth < minItemRootDepth) return cb(null, true)
return cb(null, false)
})
})
} }
/** /**
@@ -562,31 +617,37 @@ function initSet(peer, config) {
// TODO this error needs to be put into the `cb`, not thrown // TODO this error needs to be put into the `cb`, not thrown
assert(!!loadedAccountID, 'Cannot squeeze Set before loading') assert(!!loadedAccountID, 'Cannot squeeze Set before loading')
const potential = _squeezePotential(subdomain) _squeezePotential(subdomain, (err, potential) => {
if (potential < 1) return cb(null, false) if (err) return cb(err)
loaded(() => { if (potential < 1) return cb(null, false)
// TODO this error needs to be put into the `cb`, not thrown
assert(!!loadedAccountID, 'Cannot squeeze Set before loading')
const domain = fromSubdomain(subdomain)
const currentSet = readSet(loadedAccountID, subdomain)
const supersedes = [] loaded(() => {
const currentItemRoots = itemRoots.getAll(subdomain) // TODO this error needs to be put into the `cb`, not thrown
for (const item in currentItemRoots) { assert(!!loadedAccountID, 'Cannot squeeze Set before loading')
supersedes.push(...currentItemRoots[item]) const domain = fromSubdomain(subdomain)
} readSet(loadedAccountID, subdomain, (err, currentSet) => {
if (err) return cb(err)
const data = { add: [...currentSet], del: [], supersedes } const supersedes = []
peer.db.feed.publish( const currentItemRoots = itemRoots.getAll(subdomain)
{ account: loadedAccountID, domain, data }, for (const item in currentItemRoots) {
(err, rec) => { supersedes.push(...currentItemRoots[item])
// prettier-ignore }
if (err) return cb(new Error(`Failed to create msg when squeezing Set "${subdomain}"`, { cause: err }))
// @ts-ignore const data = { add: [...currentSet], del: [], supersedes }
cb(null, true) peer.db.feed.publish(
} { account: loadedAccountID, domain, data },
) // @ts-ignore
(err, rec) => {
// prettier-ignore
if (err) return cb(new Error(`Failed to create msg when squeezing Set "${subdomain}"`, { cause: err }))
// @ts-ignore
cb(null, true)
}
)
})
})
}) })
} }
//#endregion //#endregion

View File

@@ -25,12 +25,14 @@
"node": ">=16" "node": ">=16"
}, },
"dependencies": { "dependencies": {
"obz": "~1.1.0",
"pull-stream": "^3.7.0"
}, },
"devDependencies": { "devDependencies": {
"@types/pull-stream": "^3.6.7",
"bs58": "^5.0.0", "bs58": "^5.0.0",
"c8": "7", "c8": "7",
"obz": "~1.1.0", "ppppp-db": "https://codeberg.org/pzp/pzp-db/archive/master.tar.gz",
"ppppp-db": "github:staltz/ppppp-db#667b33779d98aff12a9b0cd2d7c80469a95cd04e",
"ppppp-caps": "github:staltz/ppppp-caps#93fa810b9a40b78aef4872d4c2a8412cccb52929", "ppppp-caps": "github:staltz/ppppp-caps#93fa810b9a40b78aef4872d4c2a8412cccb52929",
"ppppp-keypair": "github:staltz/ppppp-keypair#61ef4420578f450dc2cc7b1efc1c5a691a871c74", "ppppp-keypair": "github:staltz/ppppp-keypair#61ef4420578f450dc2cc7b1efc1c5a691a871c74",
"rimraf": "^4.4.0", "rimraf": "^4.4.0",

View File

@@ -37,9 +37,9 @@ test('setup', async (t) => {
assert.equal(peer.set.getGhostSpan(), 4, 'getGhostSpan') assert.equal(peer.set.getGhostSpan(), 4, 'getGhostSpan')
}) })
function lastMsgID() { async function lastMsgID() {
let last let last
for (const item of peer.db.records()) { for await (const item of peer.db.records()) {
last = item last = item
} }
return last.id return last.id
@@ -56,10 +56,10 @@ test('Set add(), del(), has(), watch()', async (t) => {
const stopWatch = peer.set.watch((ev) => actualWatch.push(ev)) const stopWatch = peer.set.watch((ev) => actualWatch.push(ev))
// Add 1st // Add 1st
assert.equal(peer.set.has('follows', '1st'), false, 'doesnt have 1st') assert.equal(await p(peer.set.has)('follows', '1st', null), false, 'doesnt have 1st')
assert(await p(peer.set.add)('follows', '1st'), 'add 1st') assert(await p(peer.set.add)('follows', '1st'), 'add 1st')
assert.equal(peer.set.has('follows', '1st'), true, 'has 1st') assert.equal(await p(peer.set.has)('follows', '1st', null), true, 'has 1st')
add1 = lastMsgID() add1 = await lastMsgID()
assert.deepEqual( assert.deepEqual(
peer.set._getItemRoots('follows'), peer.set._getItemRoots('follows'),
{ '1st': [add1] }, { '1st': [add1] },
@@ -67,10 +67,10 @@ test('Set add(), del(), has(), watch()', async (t) => {
) )
// Add 2nd // Add 2nd
assert.equal(peer.set.has('follows', '2nd'), false, 'doesnt have 2nd') assert.equal(await p(peer.set.has)('follows', '2nd', null), false, 'doesnt have 2nd')
assert(await p(peer.set.add)('follows', '2nd'), 'add 2nd') assert(await p(peer.set.add)('follows', '2nd'), 'add 2nd')
assert.equal(peer.set.has('follows', '2nd'), true, 'has 2nd') assert.equal(await p(peer.set.has)('follows', '2nd', null), true, 'has 2nd')
add2 = lastMsgID() add2 = await lastMsgID()
assert.deepEqual( assert.deepEqual(
peer.set._getItemRoots('follows'), peer.set._getItemRoots('follows'),
{ '1st': [add1], '2nd': [add2] }, { '1st': [add1], '2nd': [add2] },
@@ -78,10 +78,10 @@ test('Set add(), del(), has(), watch()', async (t) => {
) )
// Del 1st // Del 1st
assert.equal(peer.set.has('follows', '1st'), true, 'has 1st') assert.equal(await p(peer.set.has)('follows', '1st', null), true, 'has 1st')
assert(await p(peer.set.del)('follows', '1st'), 'del 1st') assert(await p(peer.set.del)('follows', '1st'), 'del 1st')
assert.equal(peer.set.has('follows', '1st'), false, 'doesnt have 1st') assert.equal(await p(peer.set.has)('follows', '1st', null), false, 'doesnt have 1st')
del1 = lastMsgID() del1 = await lastMsgID()
assert.deepEqual( assert.deepEqual(
peer.set._getItemRoots('follows'), peer.set._getItemRoots('follows'),
{ '1st': [del1], '2nd': [add2] }, { '1st': [del1], '2nd': [add2] },
@@ -93,10 +93,10 @@ test('Set add(), del(), has(), watch()', async (t) => {
assert.deepEqual(actualWatch, expectedWatch, 'watch() events') assert.deepEqual(actualWatch, expectedWatch, 'watch() events')
// Add 3rd // Add 3rd
assert.equal(peer.set.has('follows', '3rd'), false, 'doesnt have 3rd') assert.equal(await p(peer.set.has)('follows', '3rd', null), false, 'doesnt have 3rd')
assert(await p(peer.set.add)('follows', '3rd'), 'add 3rd') assert(await p(peer.set.add)('follows', '3rd'), 'add 3rd')
assert.equal(peer.set.has('follows', '3rd'), true, 'has 3rd') assert.equal(await p(peer.set.has)('follows', '3rd', null), true, 'has 3rd')
add3 = lastMsgID() add3 = await lastMsgID()
assert.deepEqual( assert.deepEqual(
peer.set._getItemRoots('follows'), peer.set._getItemRoots('follows'),
{ '3rd': [add3], '2nd': [add2] }, { '3rd': [add3], '2nd': [add2] },
@@ -104,10 +104,10 @@ test('Set add(), del(), has(), watch()', async (t) => {
) )
// Del 2nd // Del 2nd
assert.equal(peer.set.has('follows', '2nd'), true, 'has 2nd') assert.equal(await p(peer.set.has)('follows', '2nd', null), true, 'has 2nd')
assert(await p(peer.set.del)('follows', '2nd'), 'del 2nd') // msg seq 4 assert(await p(peer.set.del)('follows', '2nd'), 'del 2nd') // msg seq 4
assert.equal(peer.set.has('follows', '2nd'), false, 'doesnt have 2nd') assert.equal(await p(peer.set.has)('follows', '2nd', null), false, 'doesnt have 2nd')
del2 = lastMsgID() del2 = await lastMsgID()
assert.deepEqual( assert.deepEqual(
peer.set._getItemRoots('follows'), peer.set._getItemRoots('follows'),
{ '3rd': [add3], '2nd': [del2] }, { '3rd': [add3], '2nd': [del2] },
@@ -120,7 +120,7 @@ test('Set add(), del(), has(), watch()', async (t) => {
false, false,
'del 2nd idempotent' 'del 2nd idempotent'
) )
assert.equal(peer.set.has('follows', '2nd'), false, 'doesnt have 2nd') assert.equal(await p(peer.set.has)('follows', '2nd', null), false, 'doesnt have 2nd')
assert.deepEqual( assert.deepEqual(
peer.set._getItemRoots('follows'), peer.set._getItemRoots('follows'),
{ '3rd': [add3], '2nd': [del2] }, { '3rd': [add3], '2nd': [del2] },
@@ -131,12 +131,12 @@ test('Set add(), del(), has(), watch()', async (t) => {
let add4, add5 let add4, add5
test('Set values()', async (t) => { test('Set values()', async (t) => {
assert(await p(peer.set.add)('follows', '4th'), 'add 4th') assert(await p(peer.set.add)('follows', '4th'), 'add 4th')
add4 = lastMsgID() add4 = await lastMsgID()
assert(await p(peer.set.add)('follows', '5th'), 'add 5th') assert(await p(peer.set.add)('follows', '5th'), 'add 5th')
add5 = lastMsgID() add5 = await lastMsgID()
const expected = new Set(['3rd', '4th', '5th']) const expected = new Set(['3rd', '4th', '5th'])
for (const item of peer.set.values('follows')) { for (const item of await p(peer.set.values)('follows', null)) {
assert.equal(expected.has(item), true, 'values() item') assert.equal(expected.has(item), true, 'values() item')
expected.delete(item) expected.delete(item)
} }
@@ -150,12 +150,12 @@ test('predsl Set squeeze', async (t) => {
'itemRoots before squeeze' 'itemRoots before squeeze'
) )
assert.equal(peer.set._squeezePotential('follows'), 3, 'squeezePotential=3') assert.equal(await p(peer.set._squeezePotential)('follows'), 3, 'squeezePotential=3')
assert.equal(await p(peer.set.squeeze)('follows'), true, 'squeezed') assert.equal(await p(peer.set.squeeze)('follows'), true, 'squeezed')
const squeezed = lastMsgID() const squeezed = await lastMsgID()
assert.equal(peer.set._squeezePotential('follows'), 0, 'squeezePotential=0') assert.equal(await p(peer.set._squeezePotential)('follows'), 0, 'squeezePotential=0')
assert.deepEqual( assert.deepEqual(
peer.set._getItemRoots('follows'), peer.set._getItemRoots('follows'),
@@ -168,17 +168,17 @@ test('predsl Set squeeze', async (t) => {
false, false,
'squeeze again idempotent' 'squeeze again idempotent'
) )
const squeezed2 = lastMsgID() const squeezed2 = await lastMsgID()
assert.equal(squeezed, squeezed2, 'squeezed msgID is same') assert.equal(squeezed, squeezed2, 'squeezed msgID is same')
}) })
test('Set isGhostable', (t) => { test('Set isGhostable', async (t) => {
const moot = MsgV4.createMoot(aliceID, 'set_v1__follows', aliceKeypair) const moot = MsgV4.createMoot(aliceID, 'set_v1__follows', aliceKeypair)
const mootID = MsgV4.getMsgID(moot) const mootID = MsgV4.getMsgID(moot)
assert.equal(mootID, peer.set.getFeedID('follows'), 'getFeedID') assert.equal(mootID, peer.set.getFeedID('follows'), 'getFeedID')
const tangle = peer.db.getTangle(mootID) const tangle = await p(peer.db.getTangle)(mootID)
const msgIDs = tangle.topoSort() const msgIDs = tangle.topoSort()
const itemRoots = peer.set._getItemRoots('follows') const itemRoots = peer.set._getItemRoots('follows')
@@ -190,15 +190,15 @@ test('Set isGhostable', (t) => {
// Remember from the setup, that ghostSpan=4 // Remember from the setup, that ghostSpan=4
assert.equal(msgIDs.length, 9) assert.equal(msgIDs.length, 9)
assert.equal(peer.set.isGhostable(msgIDs[0], mootID), false) // moot assert.equal(await p(peer.set.isGhostable)(msgIDs[0], mootID), false) // moot
assert.equal(peer.set.isGhostable(msgIDs[1], mootID), false) assert.equal(await p(peer.set.isGhostable)(msgIDs[1], mootID), false)
assert.equal(peer.set.isGhostable(msgIDs[2], mootID), false) assert.equal(await p(peer.set.isGhostable)(msgIDs[2], mootID), false)
assert.equal(peer.set.isGhostable(msgIDs[3], mootID), false) assert.equal(await p(peer.set.isGhostable)(msgIDs[3], mootID), false)
assert.equal(peer.set.isGhostable(msgIDs[4], mootID), true) // in ghostSpan assert.equal(await p(peer.set.isGhostable)(msgIDs[4], mootID), true) // in ghostSpan
assert.equal(peer.set.isGhostable(msgIDs[5], mootID), true) // in ghostSpan assert.equal(await p(peer.set.isGhostable)(msgIDs[5], mootID), true) // in ghostSpan
assert.equal(peer.set.isGhostable(msgIDs[6], mootID), true) // in ghostSpan assert.equal(await p(peer.set.isGhostable)(msgIDs[6], mootID), true) // in ghostSpan
assert.equal(peer.set.isGhostable(msgIDs[7], mootID), true) // in ghostSpan assert.equal(await p(peer.set.isGhostable)(msgIDs[7], mootID), true) // in ghostSpan
assert.equal(peer.set.isGhostable(msgIDs[8], mootID), false) // item root assert.equal(await p(peer.set.isGhostable)(msgIDs[8], mootID), false) // item root
}) })
test('teardown', async (t) => { test('teardown', async (t) => {