Merge pull request 'Adapt to new async db functions' (#1) from async-db into master

Reviewed-on: https://codeberg.org/pzp/pzp-set/pulls/1
Powersource 2024-04-27 21:57:50 +00:00
commit c2351a4895
7 changed files with 282 additions and 233 deletions
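The substance of the change: pzp-db reads that used to return results synchronously (`getTangle()`, `get()`, `records()`) are now consumed asynchronously, via callbacks, pull-streams, or `for await`. A minimal before/after sketch of the calling convention, using names taken from the diff below (illustrative only, not the full pzp-db API):

```js
// Before this PR: synchronous read, the tangle comes back as a return value
const tangle = peer.db.getTangle(mootID)

// After this PR: callback-style read; errors travel through the callback
peer.db.getTangle(mootID, (err, tangle) => {
  if (err) return cb(err)
  // ... use `tangle` here, then call cb(null, result)
})
```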


@@ -1,25 +0,0 @@
-name: CI
-on:
-  push:
-    branches: [master]
-  pull_request:
-    branches: [master]
-jobs:
-  test:
-    runs-on: ubuntu-latest
-    timeout-minutes: 10
-    strategy:
-      matrix:
-        node-version: [18.x, 20.x]
-    steps:
-      - uses: actions/checkout@v3
-      - name: Use Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@v3
-        with:
-          node-version: ${{ matrix.node-version }}
-      - run: npm install
-      - run: npm test

.woodpecker.yaml (new file, +13 lines)

@@ -0,0 +1,13 @@
+matrix:
+  NODE_VERSION:
+    - 18
+    - 20
+steps:
+  test:
+    when:
+      event: [push]
+    image: node:${NODE_VERSION}
+    commands:
+      - npm install
+      - npm test


@@ -1,9 +1,7 @@
-**Work in progress**
+# pzp-set
 ## Installation
-We're not on npm yet. In your package.json, include this as
 ```js
-"ppppp-set": "github:staltz/ppppp-set"
+npm install pzp-set
 ```


@@ -1,11 +1,12 @@
 const Obz = require('obz')
-const MsgV4 = require('ppppp-db/msg-v4')
+const MsgV4 = require('pzp-db/msg-v4')
+const pull = require('pull-stream')
 const PREFIX = 'set_v1__'
 /**
- * @typedef {ReturnType<import('ppppp-db').init>} PPPPPDB
- * @typedef {import('ppppp-db').RecPresent} RecPresent
+ * @typedef {ReturnType<import('pzp-db').init>} pzpDB
+ * @typedef {import('pzp-db').RecPresent} RecPresent
  * @typedef {{
  *   hook: (
  *     cb: (
@@ -33,7 +34,7 @@ const PREFIX = 'set_v1__'
 /**
  * @template [T = any]
- * @typedef {import('ppppp-db/msg-v4').Msg<T>} Msg<T>
+ * @typedef {import('pzp-db/msg-v4').Msg<T>} Msg<T>
  */
 /**
@@ -75,7 +76,7 @@ function assert(check, message) {
 }
 /**
- * @param {{ db: PPPPPDB, close: ClosableHook }} peer
+ * @param {{ db: pzpDB, close: ClosableHook }} peer
  * @param {Config} config
  */
 function initSet(peer, config) {
@@ -191,23 +192,35 @@ function initSet(peer, config) {
   /**
    * @param {string} id
    * @param {string} subdomain
+   * @param {(err: Error | null, tangle?: any) => void} cb
    */
-  function readSet(id, subdomain) {
+  function readSet(id, subdomain, cb) {
     const domain = fromSubdomain(subdomain)
     const mootID = MsgV4.getMootID(id, domain)
-    const tangle = peer.db.getTangle(mootID)
-    if (!tangle) return new Set()
+    // @ts-ignore
+    peer.db.getTangle(mootID, (err, tangle) => {
+      if (err) return cb(err)
+      if (!tangle) return cb(null, new Set())
       const msgIDs = tangle.topoSort()
       const set = new Set()
-    for (const msgID of msgIDs) {
-      const msg = peer.db.get(msgID)
+      pull(
+        pull.values(msgIDs),
+        pull.asyncMap((msgID, cb) => {
+          peer.db.get(msgID, cb)
+        }),
+        pull.drain((msg) => {
          if (isValidSetMsg(msg)) {
            const { add, del } = msg.data
            for (const value of add) set.add(value)
            for (const value of del) set.delete(value)
          }
-    }
-    return set
+        }, (err) => {
+          // prettier-ignore
+          if (err) return cb(Error("Stream failed on readSet()", { cause: err }))
+          return cb(null, set)
+        })
+      )
+    })
   }
   /**
@@ -278,15 +291,18 @@ function initSet(peer, config) {
   /**
    * @param {string} subdomain
+   * @param {CB<number>} cb
    */
-  function _squeezePotential(subdomain) {
+  function _squeezePotential(subdomain, cb) {
     // prettier-ignore
-    if (!loadedAccountID) throw new Error('Cannot squeeze potential before loading')
+    if (!loadedAccountID) return cb(Error('Cannot squeeze potential before loading'))
     // TODO: improve this so that the squeezePotential is the size of the
     // tangle suffix built as a slice from the fieldRoots
     const mootID = MsgV4.getMootID(loadedAccountID, fromSubdomain(subdomain))
-    const tangle = peer.db.getTangle(mootID)
-    if (!tangle) return 0
+    // @ts-ignore
+    peer.db.getTangle(mootID, (err, tangle) => {
+      if (err) return cb(err)
+      if (!tangle) return cb(null, 0)
       const maxDepth = tangle.maxDepth
       const currentItemRoots = itemRoots.getAll(subdomain)
       let minDepth = Infinity
@@ -296,7 +312,8 @@ function initSet(peer, config) {
           if (depth < minDepth) minDepth = depth
         }
       }
-    return maxDepth - minDepth
+      return cb(null, maxDepth - minDepth)
+    })
   }
   //#endregion
@@ -320,8 +337,8 @@ function initSet(peer, config) {
     // microtask is needed to ensure that loadPromise is assigned BEFORE this
     // body is executed (which in turn does inversion of control when `cb` or
     // `resolve` is called)
-    queueMicrotask(() => {
-      for (const rec of peer.db.records()) {
+    queueMicrotask(async () => {
+      for await (const rec of peer.db.records()) {
         if (!rec.msg) continue
         maybeLearnAboutSet(rec.id, rec.msg)
       }
@@ -354,7 +371,9 @@ function initSet(peer, config) {
     loaded(() => {
       // TODO this error needs to be put into the `cb`, not thrown
       assert(!!loadedAccountID, 'Cannot add to Set before loading')
-      const currentSet = readSet(loadedAccountID, subdomain)
+      readSet(loadedAccountID, subdomain, (err, currentSet) => {
+        if (err) return cb(err)
         if (currentSet.has(value)) return cb(null, false)
         const domain = fromSubdomain(subdomain)
@@ -376,6 +395,7 @@ function initSet(peer, config) {
         const data = { add: [value], del: [], supersedes }
         peer.db.feed.publish(
           { account: loadedAccountID, domain, data },
+          // @ts-ignore
           (err, rec) => {
             // prettier-ignore
             if (err) return cb(new Error(`Failed to create msg when adding to Set "${subdomain}"`, { cause: err }))
@@ -388,6 +408,7 @@ function initSet(peer, config) {
           }
         )
       })
+    })
   }
   /**
@@ -404,7 +425,9 @@ function initSet(peer, config) {
     loaded(() => {
       // TODO this error needs to be put into the `cb`, not thrown
       assert(!!loadedAccountID, 'Cannot add to Set before loading')
-      const currentSet = readSet(loadedAccountID, subdomain)
+      readSet(loadedAccountID, subdomain, (err, currentSet) => {
+        if (err) return cb(err)
         if (!currentSet.has(value)) return cb(null, false)
         const domain = fromSubdomain(subdomain)
@@ -420,6 +443,7 @@ function initSet(peer, config) {
         const data = { add: [], del: [value], supersedes }
         peer.db.feed.publish(
           { account: loadedAccountID, domain, data },
+          // @ts-ignore
           (err, rec) => {
             // prettier-ignore
             if (err) return cb(new Error(`Failed to create msg when deleting from Set "${subdomain}"`, { cause: err }))
@@ -429,65 +453,86 @@ function initSet(peer, config) {
           }
         )
       })
+    })
   }
   /**
    * @param {string} subdomain
    * @param {any} value
-   * @param {string=} id
+   * @param {string?} id
+   * @param {CB<boolean>} cb
    */
-  function has(subdomain, value, id) {
+  function has(subdomain, value, id, cb) {
     assert(!!loadedAccountID, 'Cannot call has() before loading')
-    const set = readSet(id ?? loadedAccountID, subdomain)
-    return set.has(value)
+    readSet(id ?? loadedAccountID, subdomain, (err, set) => {
+      if (err) return cb(err)
+      return cb(null, set.has(value))
+    })
   }
   /**
    * @param {string} subdomain
-   * @param {string=} id
+   * @param {string?} id
+   * @param {CB<Array<any>>} cb
    */
-  function values(subdomain, id) {
+  function values(subdomain, id, cb) {
     assert(!!loadedAccountID, 'Cannot call values() before loading')
-    const set = readSet(id ?? loadedAccountID, subdomain)
-    return [...set]
+    readSet(id ?? loadedAccountID, subdomain, (err, set) => {
+      if (err) return cb(err)
+      return cb(null, [...set])
+    })
   }
   /**
    * @public
    * @param {string} tangleID
-   * @returns {number}
+   * @param {CB<number>} cb
    */
-  function minGhostDepth(tangleID) {
-    return Math.max(0, minRequiredDepth(tangleID) - ghostSpan)
+  function minGhostDepth(tangleID, cb) {
+    minRequiredDepth(tangleID, (err, minDepth) => {
+      if (err) return cb(err)
+      return cb(null, Math.max(0, minDepth - ghostSpan))
+    })
   }
   /**
    * @public
    * @param {string} tangleID
-   * @returns {number}
+   * @param {CB<number>} cb
    */
-  function minRequiredDepth(tangleID) {
-    const tangle = peer.db.getTangle(tangleID)
+  function minRequiredDepth(tangleID, cb) {
+    // @ts-ignore
+    peer.db.getTangle(tangleID, (err, tangle) => {
+      if (err) return cb(err)
       // prettier-ignore
-    if (!tangle) return 0
+      if (!tangle) return cb(null, 0)
       // prettier-ignore
-    if (!MsgV4.isMoot(tangle.root)) throw new Error(`Tangle "${tangleID}" is not a moot`)
+      if (!MsgV4.isMoot(tangle.root)) return cb(Error(`Tangle "${tangleID}" is not a moot`))
       const domain = tangle.root.metadata.domain
       // prettier-ignore
-    if (!domain.startsWith(PREFIX)) throw new Error(`Tangle "${tangleID}" is not a Set moot`)
+      if (!domain.startsWith(PREFIX)) return cb(Error(`Tangle "${tangleID}" is not a Set moot`))
       // Discover item roots
       const itemRoots = new Set()
-    const msgIDs = tangle.topoSort()
-    for (const msgID of msgIDs) {
-      const msg = peer.db.get(msgID)
-      if (!msg?.data) continue
+      pull(
+        pull.values(tangle.topoSort()),
+        pull.asyncMap((msgID, cb) => {
+          peer.db.getRecord(msgID, cb)
+        }),
+        pull.drain((rec) => {
+          const { msg, id: msgID } = rec
+          if (!msg?.data) return
           for (const supersededMsgID of msg.data.supersedes) {
            itemRoots.delete(supersededMsgID)
          }
          itemRoots.add(msgID)
-    }
+        }, (err) => {
+          if (err) return cb(Error("Failed to iterate over tangle messages in minRequiredDepth()", { cause: err }))
          // Get minimum depth of all item roots
          let minDepth = Infinity
@@ -496,7 +541,10 @@ function initSet(peer, config) {
            if (depth < minDepth) minDepth = depth
          }
-    return minDepth
+          return cb(null, minDepth)
+        })
+      )
+    })
   }
   /**
@@ -514,20 +562,27 @@ function initSet(peer, config) {
    * @public
    * @param {MsgID} ghostableMsgID
    * @param {MsgID} tangleID
+   * @param {(err: Error | null, ghostable?: boolean) => void} cb
    */
-  function isGhostable(ghostableMsgID, tangleID) {
-    if (ghostableMsgID === tangleID) return false
-    const msg = peer.db.get(ghostableMsgID)
+  function isGhostable(ghostableMsgID, tangleID, cb) {
+    if (ghostableMsgID === tangleID) return cb(null, false)
+    // @ts-ignore
+    peer.db.get(ghostableMsgID, (err, msg) => {
+      if (err) return cb(err)
       // prettier-ignore
-    if (!msg) throw new Error(`isGhostable() msgID "${ghostableMsgID}" does not exist in the database`)
-    const minItemRootDepth = minRequiredDepth(tangleID)
+      if (!msg) return cb(Error(`isGhostable() msgID "${ghostableMsgID}" does not exist in the database`))
+      minRequiredDepth(tangleID, (err, minItemRootDepth) => {
+        if (err) return cb(err)
        const minGhostDepth = minItemRootDepth - ghostSpan
        const msgDepth = msg.metadata.tangles[tangleID].depth
-    if (minGhostDepth <= msgDepth && msgDepth < minItemRootDepth) return true
-    return false
+        if (minGhostDepth <= msgDepth && msgDepth < minItemRootDepth) return cb(null, true)
+        return cb(null, false)
+      })
+    })
   }
   /**
@@ -562,14 +617,17 @@ function initSet(peer, config) {
     // TODO this error needs to be put into the `cb`, not thrown
     assert(!!loadedAccountID, 'Cannot squeeze Set before loading')
-    const potential = _squeezePotential(subdomain)
+    _squeezePotential(subdomain, (err, potential) => {
+      if (err) return cb(err)
       if (potential < 1) return cb(null, false)
       loaded(() => {
         // TODO this error needs to be put into the `cb`, not thrown
         assert(!!loadedAccountID, 'Cannot squeeze Set before loading')
         const domain = fromSubdomain(subdomain)
-      const currentSet = readSet(loadedAccountID, subdomain)
+        readSet(loadedAccountID, subdomain, (err, currentSet) => {
+          if (err) return cb(err)
          const supersedes = []
          const currentItemRoots = itemRoots.getAll(subdomain)
@@ -580,6 +638,7 @@ function initSet(peer, config) {
          const data = { add: [...currentSet], del: [], supersedes }
          peer.db.feed.publish(
            { account: loadedAccountID, domain, data },
+            // @ts-ignore
            (err, rec) => {
              // prettier-ignore
              if (err) return cb(new Error(`Failed to create msg when squeezing Set "${subdomain}"`, { cause: err }))
@@ -588,6 +647,8 @@ function initSet(peer, config) {
            }
          )
        })
+      })
+    })
   }
   //#endregion


@@ -1,13 +1,13 @@
 {
-  "name": "ppppp-set",
-  "version": "1.0.0",
+  "name": "pzp-set",
+  "version": "0.0.1",
   "description": "Set data structure over append-only logs with pruning",
   "author": "Andre Staltz <contact@staltz.com>",
   "license": "MIT",
-  "homepage": "https://github.com/staltz/ppppp-set",
+  "homepage": "https://codeberg.org/pzp/pzp-set",
   "repository": {
     "type": "git",
-    "url": "git@github.com:staltz/ppppp-set.git"
+    "url": "git@codeberg.org:pzp/pzp-set.git"
   },
   "main": "index.js",
   "files": [
@@ -25,14 +25,16 @@
     "node": ">=16"
   },
   "dependencies": {
+    "obz": "~1.1.0",
+    "pull-stream": "^3.7.0"
   },
   "devDependencies": {
+    "@types/pull-stream": "^3.6.7",
     "bs58": "^5.0.0",
     "c8": "7",
-    "obz": "~1.1.0",
-    "ppppp-db": "github:staltz/ppppp-db#667b33779d98aff12a9b0cd2d7c80469a95cd04e",
-    "ppppp-caps": "github:staltz/ppppp-caps#93fa810b9a40b78aef4872d4c2a8412cccb52929",
-    "ppppp-keypair": "github:staltz/ppppp-keypair#61ef4420578f450dc2cc7b1efc1c5a691a871c74",
+    "pzp-db": "^1.0.1",
+    "pzp-caps": "^1.0.0",
+    "pzp-keypair": "^1.0.0",
     "rimraf": "^4.4.0",
     "secret-stack": "~8.1.0",
     "secret-handshake-ext": "0.0.10",


@@ -3,12 +3,12 @@ const assert = require('node:assert')
 const path = require('node:path')
 const os = require('node:os')
 const rimraf = require('rimraf')
-const MsgV4 = require('ppppp-db/msg-v4')
+const MsgV4 = require('pzp-db/msg-v4')
 const p = require('node:util').promisify
 const { createPeer } = require('./util')
-const Keypair = require('ppppp-keypair')
+const Keypair = require('pzp-keypair')
-const DIR = path.join(os.tmpdir(), 'ppppp-set')
+const DIR = path.join(os.tmpdir(), 'pzp-set')
 rimraf.sync(DIR)
 const aliceKeypair = Keypair.generate('ed25519', 'alice')
@@ -37,9 +37,9 @@ test('setup', async (t) => {
   assert.equal(peer.set.getGhostSpan(), 4, 'getGhostSpan')
 })
-function lastMsgID() {
+async function lastMsgID() {
   let last
-  for (const item of peer.db.records()) {
+  for await (const item of peer.db.records()) {
     last = item
   }
   return last.id
@@ -56,10 +56,10 @@ test('Set add(), del(), has(), watch()', async (t) => {
   const stopWatch = peer.set.watch((ev) => actualWatch.push(ev))
   // Add 1st
-  assert.equal(peer.set.has('follows', '1st'), false, 'doesnt have 1st')
+  assert.equal(await p(peer.set.has)('follows', '1st', null), false, 'doesnt have 1st')
   assert(await p(peer.set.add)('follows', '1st'), 'add 1st')
-  assert.equal(peer.set.has('follows', '1st'), true, 'has 1st')
+  assert.equal(await p(peer.set.has)('follows', '1st', null), true, 'has 1st')
-  add1 = lastMsgID()
+  add1 = await lastMsgID()
   assert.deepEqual(
     peer.set._getItemRoots('follows'),
     { '1st': [add1] },
@@ -67,10 +67,10 @@ test('Set add(), del(), has(), watch()', async (t) => {
   )
   // Add 2nd
-  assert.equal(peer.set.has('follows', '2nd'), false, 'doesnt have 2nd')
+  assert.equal(await p(peer.set.has)('follows', '2nd', null), false, 'doesnt have 2nd')
   assert(await p(peer.set.add)('follows', '2nd'), 'add 2nd')
-  assert.equal(peer.set.has('follows', '2nd'), true, 'has 2nd')
+  assert.equal(await p(peer.set.has)('follows', '2nd', null), true, 'has 2nd')
-  add2 = lastMsgID()
+  add2 = await lastMsgID()
   assert.deepEqual(
     peer.set._getItemRoots('follows'),
     { '1st': [add1], '2nd': [add2] },
@@ -78,10 +78,10 @@ test('Set add(), del(), has(), watch()', async (t) => {
   )
   // Del 1st
-  assert.equal(peer.set.has('follows', '1st'), true, 'has 1st')
+  assert.equal(await p(peer.set.has)('follows', '1st', null), true, 'has 1st')
   assert(await p(peer.set.del)('follows', '1st'), 'del 1st')
-  assert.equal(peer.set.has('follows', '1st'), false, 'doesnt have 1st')
+  assert.equal(await p(peer.set.has)('follows', '1st', null), false, 'doesnt have 1st')
-  del1 = lastMsgID()
+  del1 = await lastMsgID()
   assert.deepEqual(
     peer.set._getItemRoots('follows'),
     { '1st': [del1], '2nd': [add2] },
@@ -93,10 +93,10 @@ test('Set add(), del(), has(), watch()', async (t) => {
   assert.deepEqual(actualWatch, expectedWatch, 'watch() events')
   // Add 3rd
-  assert.equal(peer.set.has('follows', '3rd'), false, 'doesnt have 3rd')
+  assert.equal(await p(peer.set.has)('follows', '3rd', null), false, 'doesnt have 3rd')
   assert(await p(peer.set.add)('follows', '3rd'), 'add 3rd')
-  assert.equal(peer.set.has('follows', '3rd'), true, 'has 3rd')
+  assert.equal(await p(peer.set.has)('follows', '3rd', null), true, 'has 3rd')
-  add3 = lastMsgID()
+  add3 = await lastMsgID()
   assert.deepEqual(
     peer.set._getItemRoots('follows'),
     { '3rd': [add3], '2nd': [add2] },
@@ -104,10 +104,10 @@ test('Set add(), del(), has(), watch()', async (t) => {
   )
   // Del 2nd
-  assert.equal(peer.set.has('follows', '2nd'), true, 'has 2nd')
+  assert.equal(await p(peer.set.has)('follows', '2nd', null), true, 'has 2nd')
   assert(await p(peer.set.del)('follows', '2nd'), 'del 2nd') // msg seq 4
-  assert.equal(peer.set.has('follows', '2nd'), false, 'doesnt have 2nd')
+  assert.equal(await p(peer.set.has)('follows', '2nd', null), false, 'doesnt have 2nd')
-  del2 = lastMsgID()
+  del2 = await lastMsgID()
   assert.deepEqual(
     peer.set._getItemRoots('follows'),
     { '3rd': [add3], '2nd': [del2] },
@@ -120,7 +120,7 @@ test('Set add(), del(), has(), watch()', async (t) => {
     false,
     'del 2nd idempotent'
   )
-  assert.equal(peer.set.has('follows', '2nd'), false, 'doesnt have 2nd')
+  assert.equal(await p(peer.set.has)('follows', '2nd', null), false, 'doesnt have 2nd')
   assert.deepEqual(
     peer.set._getItemRoots('follows'),
     { '3rd': [add3], '2nd': [del2] },
@@ -131,12 +131,12 @@
 let add4, add5
 test('Set values()', async (t) => {
   assert(await p(peer.set.add)('follows', '4th'), 'add 4th')
-  add4 = lastMsgID()
+  add4 = await lastMsgID()
   assert(await p(peer.set.add)('follows', '5th'), 'add 5th')
-  add5 = lastMsgID()
+  add5 = await lastMsgID()
   const expected = new Set(['3rd', '4th', '5th'])
-  for (const item of peer.set.values('follows')) {
+  for (const item of await p(peer.set.values)('follows', null)) {
     assert.equal(expected.has(item), true, 'values() item')
     expected.delete(item)
   }
@@ -150,12 +150,12 @@ test('predsl Set squeeze', async (t) => {
     'itemRoots before squeeze'
   )
-  assert.equal(peer.set._squeezePotential('follows'), 3, 'squeezePotential=3')
+  assert.equal(await p(peer.set._squeezePotential)('follows'), 3, 'squeezePotential=3')
   assert.equal(await p(peer.set.squeeze)('follows'), true, 'squeezed')
-  const squeezed = lastMsgID()
+  const squeezed = await lastMsgID()
-  assert.equal(peer.set._squeezePotential('follows'), 0, 'squeezePotential=0')
+  assert.equal(await p(peer.set._squeezePotential)('follows'), 0, 'squeezePotential=0')
   assert.deepEqual(
     peer.set._getItemRoots('follows'),
@@ -168,17 +168,17 @@
     false,
     'squeeze again idempotent'
   )
-  const squeezed2 = lastMsgID()
+  const squeezed2 = await lastMsgID()
   assert.equal(squeezed, squeezed2, 'squeezed msgID is same')
 })
-test('Set isGhostable', (t) => {
+test('Set isGhostable', async (t) => {
   const moot = MsgV4.createMoot(aliceID, 'set_v1__follows', aliceKeypair)
   const mootID = MsgV4.getMsgID(moot)
   assert.equal(mootID, peer.set.getFeedID('follows'), 'getFeedID')
-  const tangle = peer.db.getTangle(mootID)
+  const tangle = await p(peer.db.getTangle)(mootID)
   const msgIDs = tangle.topoSort()
   const itemRoots = peer.set._getItemRoots('follows')
@@ -190,15 +190,15 @@ test('Set isGhostable', async (t) => {
   // Remember from the setup, that ghostSpan=4
   assert.equal(msgIDs.length, 9)
-  assert.equal(peer.set.isGhostable(msgIDs[0], mootID), false) // moot
-  assert.equal(peer.set.isGhostable(msgIDs[1], mootID), false)
-  assert.equal(peer.set.isGhostable(msgIDs[2], mootID), false)
-  assert.equal(peer.set.isGhostable(msgIDs[3], mootID), false)
-  assert.equal(peer.set.isGhostable(msgIDs[4], mootID), true) // in ghostSpan
-  assert.equal(peer.set.isGhostable(msgIDs[5], mootID), true) // in ghostSpan
-  assert.equal(peer.set.isGhostable(msgIDs[6], mootID), true) // in ghostSpan
-  assert.equal(peer.set.isGhostable(msgIDs[7], mootID), true) // in ghostSpan
-  assert.equal(peer.set.isGhostable(msgIDs[8], mootID), false) // item root
+  assert.equal(await p(peer.set.isGhostable)(msgIDs[0], mootID), false) // moot
+  assert.equal(await p(peer.set.isGhostable)(msgIDs[1], mootID), false)
+  assert.equal(await p(peer.set.isGhostable)(msgIDs[2], mootID), false)
+  assert.equal(await p(peer.set.isGhostable)(msgIDs[3], mootID), false)
+  assert.equal(await p(peer.set.isGhostable)(msgIDs[4], mootID), true) // in ghostSpan
+  assert.equal(await p(peer.set.isGhostable)(msgIDs[5], mootID), true) // in ghostSpan
+  assert.equal(await p(peer.set.isGhostable)(msgIDs[6], mootID), true) // in ghostSpan
+  assert.equal(await p(peer.set.isGhostable)(msgIDs[7], mootID), true) // in ghostSpan
+  assert.equal(await p(peer.set.isGhostable)(msgIDs[8], mootID), false) // item root
 })
 test('teardown', async (t) => {


@@ -1,15 +1,15 @@
 const OS = require('node:os')
 const Path = require('node:path')
 const rimraf = require('rimraf')
-const caps = require('ppppp-caps')
-const Keypair = require('ppppp-keypair')
+const caps = require('pzp-caps')
+const Keypair = require('pzp-keypair')
 function createPeer(config) {
   if (config.name) {
     const name = config.name
     const tmp = OS.tmpdir()
     config.global ??= {}
-    config.global.path ??= Path.join(tmp, `ppppp-set-${name}-${Date.now()}`)
+    config.global.path ??= Path.join(tmp, `pzp-set-${name}-${Date.now()}`)
     config.global.keypair ??= Keypair.generate('ed25519', name)
     delete config.name
   }
@@ -27,7 +27,7 @@ function createPeer(config) {
   return require('secret-stack/bare')()
     .use(require('secret-stack/plugins/net'))
     .use(require('secret-handshake-ext/secret-stack'))
-    .use(require('ppppp-db'))
+    .use(require('pzp-db'))
     .use(require('ssb-box'))
     .use(require('../lib'))
     .call(null, {