mirror of https://codeberg.org/pzp/pzp-dict.git
Merge pull request 'Support async db functions and make ready for publish' (#1) from async-db into master
Reviewed-on: https://codeberg.org/pzp/pzp-dict/pulls/1
Commit: 50605c04f3
@ -1,25 +0,0 @@
name: CI

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

jobs:
  test:
    runs-on: ubuntu-latest
    timeout-minutes: 10

    strategy:
      matrix:
        node-version: [16.x, 18.x]

    steps:
      - uses: actions/checkout@v3
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node-version }}
      - run: npm install
      - run: npm test
@ -0,0 +1,13 @@
matrix:
  NODE_VERSION:
    - 18
    - 20

steps:
  test:
    when:
      event: [push]
    image: node:${NODE_VERSION}
    commands:
      - npm install
      - npm test
@ -1,9 +1,7 @@
**Work in progress**
# pzp-dict

## Installation

We're not on npm yet. In your package.json, include this as

```js
"ppppp-dict": "github:staltz/ppppp-dict"
npm install pzp-dict
```
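For orientation, here is a hypothetical end-to-end usage sketch assembled from the test helper further down in this diff (`test/util.js`); the plugin wiring and the promisified `read()` call are illustrative assumptions, not documented API:

```js
const p = require('node:util').promisify

// Wire up a peer roughly the way the test helper in this diff does (sketch only).
const peer = require('secret-stack/bare')()
  .use(require('pzp-db'))
  .use(require('pzp-dict'))
  .call(null, { global: { path: '/tmp/pzp-dict-example' } })

async function main(accountID) {
  // After this change, dict methods take callbacks, so promisify them to await.
  const profile = await p(peer.dict.read)(accountID, 'profile')
  console.log(profile)
}
```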
lib/index.js (253 changed lines)
@ -1,10 +1,11 @@
const MsgV4 = require('ppppp-db/msg-v4')
const MsgV4 = require('pzp-db/msg-v4')
const pull = require('pull-stream')

const PREFIX = 'dict_v1__'

/**
 * @typedef {ReturnType<import('ppppp-db').init>} PPPPPDB
 * @typedef {import('ppppp-db').RecPresent} RecPresent
 * @typedef {ReturnType<import('pzp-db').init>} PZPDB
 * @typedef {import('pzp-db').RecPresent} RecPresent
 * @typedef {{
 * hook: (
 * cb: (
@ -32,12 +33,12 @@ const PREFIX = 'dict_v1__'
/**
 * @template [T = any]
 * @typedef {import('ppppp-db/msg-v4').Msg<T>} Msg<T>
 * @typedef {import('pzp-db/msg-v4').Msg<T>} Msg<T>
 */

/**
 * @template T
 * @typedef {T extends void ?
 * @typedef {[T] extends [void] ?
 * (...args: [Error] | []) => void :
 * (...args: [Error] | [null, T]) => void
 * } CB
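One detail worth calling out in the `CB` typedef change: writing `[T] extends [void]` instead of `T extends void` wraps both sides in a one-element tuple, which stops the conditional type from distributing over unions and makes `CB<void>` resolve cleanly to the no-value callback shape. A small self-contained JSDoc sketch of the same idea (the names `onCount` and `onDone` are invented for illustration):

```js
/**
 * @template T
 * @typedef {[T] extends [void]
 *   ? (...args: [Error] | []) => void
 *   : (...args: [Error] | [null, T]) => void
 * } CB
 */

/** @type {CB<number>} */
const onCount = (err, n) => {
  if (err) throw err
  console.log('count is', n)
}

/** @type {CB<void>} */
const onDone = (err) => {
  if (err) throw err
  console.log('done')
}

onCount(null, 42) // callback that receives a value
onDone()          // callback with no value
```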
@ -60,7 +61,7 @@ function fromSubdomain(subdomain) {
}

/**
 * @param {{ db: PPPPPDB, close: ClosableHook }} peer
 * @param {{ db: PZPDB, close: ClosableHook }} peer
 * @param {Config} config
 */
function initDict(peer, config) {
@ -245,26 +246,29 @@ function initDict(peer, config) {
/**
 * @private
 * @param {string} subdomain
 * @returns {number}
 * @param {CB<number>} cb
 */
function _squeezePotential(subdomain) {
function _squeezePotential(subdomain, cb) {
// prettier-ignore
if (!loadedAccountID) throw new Error('Cannot squeeze potential before loading')
if (!loadedAccountID) return cb(Error('Cannot squeeze potential before loading'))
// TODO: improve this so that the squeezePotential is the size of the
// tangle suffix built as a slice from the fieldRoots
const mootID = MsgV4.getMootID(loadedAccountID, fromSubdomain(subdomain))
const tangle = peer.db.getTangle(mootID)
if (!tangle) return 0
const maxDepth = tangle.maxDepth
const fieldRoots = _getFieldRoots(subdomain)
let minDepth = Infinity
for (const field in fieldRoots) {
for (const msgID of fieldRoots[field]) {
const depth = tangle.getDepth(msgID)
if (depth < minDepth) minDepth = depth
peer.db.getTangle(mootID, (err, tangle) => {
if (err) return cb(err)

if (!tangle) return cb(null, 0)
const maxDepth = tangle.maxDepth
const fieldRoots = _getFieldRoots(subdomain)
let minDepth = Infinity
for (const field in fieldRoots) {
for (const msgID of fieldRoots[field]) {
const depth = tangle.getDepth(msgID)
if (depth < minDepth) minDepth = depth
}
}
}
return maxDepth - minDepth
return cb(null, maxDepth - minDepth)
})
}

/**
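The conversion pattern in this hunk repeats throughout the file: a synchronous `peer.db.getTangle()` return value becomes a callback argument, `throw` becomes `cb(err)`, and plain `return x` becomes `cb(null, x)`. A stripped-down sketch of that transformation, with the db accessor passed in as a parameter so the snippet stands alone:

```js
// Before: synchronous style; errors are thrown, results are returned.
function maxDepthSync(getTangle, mootID) {
  const tangle = getTangle(mootID)
  if (!tangle) return 0
  return tangle.maxDepth
}

// After: callback style; errors and results both travel through cb.
function maxDepthAsync(getTangle, mootID, cb) {
  getTangle(mootID, (err, tangle) => {
    if (err) return cb(err)
    if (!tangle) return cb(null, 0)
    cb(null, tangle.maxDepth)
  })
}
```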
@ -288,7 +292,6 @@ function initDict(peer, config) {
(err, rec) => {
// prettier-ignore
if (err) return cb(new Error('Failed to create msg when force-updating Dict', { cause: err }))
// @ts-ignore
cb(null, true)
}
)
@ -315,8 +318,8 @@ function initDict(peer, config) {
// microtask is needed to ensure that loadPromise is assigned BEFORE this
// body is executed (which in turn does inversion of control when `cb` or
// `resolve` is called)
queueMicrotask(() => {
for (const rec of peer.db.records()) {
queueMicrotask(async () => {
for await (const rec of peer.db.records()) {
if (!rec.msg) continue
maybeLearnAboutDict(rec.id, rec.msg)
}
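Here `peer.db.records()` changes from a synchronous iterable to an async iterable, which is why the microtask becomes `async` and the loop becomes `for await`. A self-contained sketch of that consumption pattern (the generator and record shape are stand-ins, not the real database):

```js
// Stand-in async iterable, playing the role of peer.db.records()
async function* records() {
  yield { id: 'msg1', msg: { metadata: { domain: 'dict_v1__profile' } } }
  yield { id: 'msg2', msg: null } // a record without a msg gets skipped
}

async function scan() {
  for await (const rec of records()) {
    if (!rec.msg) continue
    console.log('learned about', rec.id)
  }
}

scan()
```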
@ -347,74 +350,103 @@ function initDict(peer, config) {
/**
 * @public
 * @param {string} tangleID
 * @returns {number}
 * @param {CB<number>} cb
 */
function minRequiredDepth(tangleID) {
const tangle = peer.db.getTangle(tangleID)
function minRequiredDepth(tangleID, cb) {
peer.db.getTangle(tangleID, (err, tangle) => {
if (err) return cb(err)

// prettier-ignore
if (!tangle) return 0
// prettier-ignore
if (!MsgV4.isMoot(tangle.root)) throw new Error(`Tangle "${tangleID}" is not a moot`)
const domain = tangle.root.metadata.domain
// prettier-ignore
if (!domain.startsWith(PREFIX)) throw new Error(`Tangle "${tangleID}" is not a Dict moot`)
// prettier-ignore
if (!tangle) return cb(null, 0)
// prettier-ignore
if (!MsgV4.isMoot(tangle.root)) return cb(Error(`Tangle "${tangleID}" is not a moot`))
const domain = tangle.root.metadata.domain
// prettier-ignore
if (!domain.startsWith(PREFIX)) return cb(Error(`Tangle "${tangleID}" is not a Dict moot`))

// Discover field roots
const fieldRoots = new Set()
const msgIDs = tangle.topoSort()
for (const msgID of msgIDs) {
const msg = peer.db.get(msgID)
if (!msg?.data) continue
for (const supersededMsgID of msg.data.supersedes) {
fieldRoots.delete(supersededMsgID)
}
fieldRoots.add(msgID)
}
// Discover field roots
const fieldRoots = new Set()

pull(
pull.values(tangle.topoSort()),
pull.asyncMap((msgID, cb) => {
peer.db.get(msgID, (err, msg) => {
if (err) return cb(err)

// Get minimum depth of all field roots
let minDepth = Infinity
for (const msgID of fieldRoots) {
const depth = tangle.getDepth(msgID)
if (depth < minDepth) minDepth = depth
}
if (!msg?.data) return cb(null, null)
for (const supersededMsgID of msg.data.supersedes) {
fieldRoots.delete(supersededMsgID)
}
fieldRoots.add(msgID)

return minDepth
return cb(null, null)
})
}),
pull.drain(() => {}, (err) => {
// prettier-ignore
if (err) return cb(Error("minRequiredDepth() stream in dict failed", { cause: err }))

// Get minimum depth of all field roots
let minDepth = Infinity
for (const msgID of fieldRoots) {
const depth = tangle.getDepth(msgID)
if (depth < minDepth) minDepth = depth
}

return cb(null, minDepth)
})
)
})
}

/**
 * @public
 * @param {string} tangleID
 * @returns {number}
 * @param {CB<number>} cb
 */
function minGhostDepth(tangleID) {
return Math.max(0, minRequiredDepth(tangleID) - ghostSpan)
function minGhostDepth(tangleID, cb) {
minRequiredDepth(tangleID, (err, minDepth) => {
if (err) return cb(err)

return cb(null, Math.max(0, minDepth - ghostSpan))
})
}

/**
 * @public
 * @param {string} id
 * @param {string} subdomain
 * @returns {{[field in string]: any} | null}
 * @param {CB<{[field in string]: any} | null>} cb
 */
function read(id, subdomain) {
function read(id, subdomain, cb) {
const domain = fromSubdomain(subdomain)
const mootID = MsgV4.getMootID(id, domain)
const tangle = peer.db.getTangle(mootID)
if (!tangle) {
if (id === loadedAccountID) return {}
else return null
}
const msgIDs = tangle.topoSort()
const dict = /** @type {{[field in string]: any}} */ ({})
for (const msgID of msgIDs) {
const msg = peer.db.get(msgID)
if (isValidDictMsg(msg)) {
const { update } = msg.data
Object.assign(dict, update)
peer.db.getTangle(mootID, (err, tangle) => {
if (err) return cb(err)

if (!tangle) {
if (id === loadedAccountID) return cb(null, {})
else return cb(null, null)
}
}
return dict

const dict = /** @type {{[field in string]: any}} */ ({})

pull(
pull.values(tangle.topoSort()),
pull.asyncMap((msgID, cb) => {
peer.db.get(msgID, cb)
}),
pull.drain((msg) => {
if (isValidDictMsg(msg)) {
const { update } = msg.data
Object.assign(dict, update)
}
}, (err) => {
if (err) return cb(Error("dict read failed", { cause: err }))
cb(null, dict)
})
)
})
}

/**
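Both `minRequiredDepth()` and `read()` now walk the tangle with pull-stream instead of a plain `for` loop, because `peer.db.get()` is callback-based: `pull.values()` feeds the message IDs, `pull.asyncMap()` runs the async lookup per ID, and `pull.drain()` signals completion or failure. A minimal self-contained example of that pipeline (the `get()` here is a fake, not the real db):

```js
const pull = require('pull-stream')

// Fake callback-based lookup, standing in for peer.db.get()
const fakeDB = { a: 1, b: 2, c: 3 }
function get(key, cb) {
  setImmediate(() => cb(null, fakeDB[key]))
}

function sumValues(keys, cb) {
  let sum = 0
  pull(
    pull.values(keys), // source: one item per key
    pull.asyncMap((key, done) => {
      get(key, (err, value) => {
        if (err) return done(err)
        sum += value
        done(null, null)
      })
    }),
    pull.drain(
      () => {}, // results are accumulated via the side effect above
      (err) => (err ? cb(err) : cb(null, sum))
    )
  )
}

sumValues(['a', 'b', 'c'], (err, total) => {
  if (err) throw err
  console.log(total) // 6
})
```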
@ -432,20 +464,27 @@ function initDict(peer, config) {
 * @public
 * @param {MsgID} ghostableMsgID
 * @param {MsgID} tangleID
 * @param {CB<boolean>} cb
 */
function isGhostable(ghostableMsgID, tangleID) {
if (ghostableMsgID === tangleID) return false
function isGhostable(ghostableMsgID, tangleID, cb) {
if (ghostableMsgID === tangleID) return cb(null, false)
let i = 0

const msg = peer.db.get(ghostableMsgID)
peer.db.get(ghostableMsgID, (err, msg) => {
if (err) return cb(err)

// prettier-ignore
if (!msg) throw new Error(`isGhostable() msgID "${ghostableMsgID}" does not exist in the database`)
// prettier-ignore
if (!msg) return cb(Error(`isGhostable() msgID "${ghostableMsgID}" does not exist in the database`))

const minFieldRootDepth = minRequiredDepth(tangleID)
const minGhostDepth = minFieldRootDepth - ghostSpan
const msgDepth = msg.metadata.tangles[tangleID].depth
if (minGhostDepth <= msgDepth && msgDepth < minFieldRootDepth) return true
return false
minRequiredDepth(tangleID, (err, minFieldRootDepth) => {
if (err) return cb(err)

const minGhostDepth = minFieldRootDepth - ghostSpan
const msgDepth = msg.metadata.tangles[tangleID].depth
if (minGhostDepth <= msgDepth && msgDepth < minFieldRootDepth) return cb(null, true)
return cb(null, false)
})
})
}

/**
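The ghostable decision itself stays pure arithmetic: a message can be ghosted when its depth lands in the half-open window `[minFieldRootDepth - ghostSpan, minFieldRootDepth)`. A tiny worked example with invented numbers, matching the ghostSpan=4 setup used in the tests below:

```js
const ghostSpan = 4
const minFieldRootDepth = 7 // depth of the shallowest field root (example value)
const minGhostDepth = minFieldRootDepth - ghostSpan // 3

function isInGhostWindow(msgDepth) {
  return minGhostDepth <= msgDepth && msgDepth < minFieldRootDepth
}

console.log(isInGhostWindow(2)) // false: below the window
console.log(isInGhostWindow(3)) // true
console.log(isInGhostWindow(6)) // true
console.log(isInGhostWindow(7)) // false: at or above the field roots
```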
@ -476,19 +515,22 @@ function initDict(peer, config) {
loaded(() => {
// prettier-ignore
if (!loadedAccountID) return cb(new Error('Expected account to be loaded'))
const dict = read(loadedAccountID, subdomain)
// prettier-ignore
if (!dict) return cb(new Error(`Cannot update non-existent dict "${subdomain}`))
read(loadedAccountID, subdomain, (err, dict) => {
if (err) return cb(err)

let hasChanges = false
for (const [field, value] of Object.entries(update)) {
if (value !== dict[field]) {
hasChanges = true
break
// prettier-ignore
if (!dict) return cb(Error(`Cannot update non-existent dict "${subdomain}`))

let hasChanges = false
for (const [field, value] of Object.entries(update)) {
if (value !== dict[field]) {
hasChanges = true
break
}
}
}
if (!hasChanges) return cb(null, false)
forceUpdate(subdomain, update, cb)
if (!hasChanges) return cb(null, false)
forceUpdate(subdomain, update, cb)
})
})
}
@ -498,20 +540,25 @@ function initDict(peer, config) {
 */
function squeeze(subdomain, cb) {
if (!loadedAccountID) return cb(new Error('Cannot squeeze before loading'))
const potential = _squeezePotential(subdomain)
if (potential < 1) return cb(null, false)
_squeezePotential(subdomain, (err, potential) => {
if (err) return cb(err)

loaded(() => {
// prettier-ignore
if (!loadedAccountID) return cb(new Error('Expected account to be loaded'))
const dict = read(loadedAccountID, subdomain)
// prettier-ignore
if (!dict) return cb(new Error(`Cannot squeeze non-existent Dict "${subdomain}"`))
forceUpdate(subdomain, dict, (err, _forceUpdated) => {
if (potential < 1) return cb(null, false)

loaded(() => {
// prettier-ignore
if (err) return cb(new Error(`Failed to force update when squeezing Dict "${subdomain}"`, { cause: err }))
// @ts-ignore
cb(null, true)
if (!loadedAccountID) return cb(new Error('Expected account to be loaded'))
read(loadedAccountID, subdomain, (err, dict) => {
if (err) return cb(err)

// prettier-ignore
if (!dict) return cb(new Error(`Cannot squeeze non-existent Dict "${subdomain}"`))
forceUpdate(subdomain, dict, (err, _forceUpdated) => {
// prettier-ignore
if (err) return cb(new Error(`Failed to force update when squeezing Dict "${subdomain}"`, { cause: err }))
cb(null, true)
})
})
})
})
}
package.json (18 changed lines)
@ -1,13 +1,13 @@
{
  "name": "ppppp-dict",
  "version": "1.0.0",
  "name": "pzp-dict",
  "version": "0.0.1",
  "description": "Dictionary data structure over append-only logs with pruning",
  "author": "Andre Staltz <contact@staltz.com>",
  "license": "MIT",
  "homepage": "https://github.com/staltz/ppppp-dict",
  "homepage": "https://codeberg.org/pzp/pzp-dict",
  "repository": {
    "type": "git",
    "url": "git@github.com:staltz/ppppp-dict.git"
    "url": "git@codeberg.org:pzp/pzp-dict.git"
  },
  "main": "index.js",
  "files": [
@ -25,16 +25,18 @@
    "node": ">=16"
  },
  "dependencies": {
    "pull-stream": "^3.7.0"
  },
  "devDependencies": {
    "@types/pull-stream": "^3.6.7",
    "bs58": "^5.0.0",
    "c8": "7",
    "ppppp-db": "github:staltz/ppppp-db#667b33779d98aff12a9b0cd2d7c80469a95cd04e",
    "ppppp-caps": "github:staltz/ppppp-caps#93fa810b9a40b78aef4872d4c2a8412cccb52929",
    "ppppp-keypair": "github:staltz/ppppp-keypair#61ef4420578f450dc2cc7b1efc1c5a691a871c74",
    "pzp-caps": "^1.0.0",
    "pzp-db": "^1.0.1",
    "pzp-keypair": "^1.0.0",
    "rimraf": "^4.4.0",
    "secret-stack": "~8.1.0",
    "secret-handshake-ext": "0.0.10",
    "secret-stack": "~8.1.0",
    "ssb-box": "^1.0.1",
    "typescript": "^5.1.3"
  },
@ -3,19 +3,19 @@ const assert = require('node:assert')
const path = require('path')
const os = require('os')
const rimraf = require('rimraf')
const MsgV4 = require('ppppp-db/msg-v4')
const Keypair = require('ppppp-keypair')
const MsgV4 = require('pzp-db/msg-v4')
const Keypair = require('pzp-keypair')
const p = require('util').promisify
const { createPeer } = require('./util')

const DIR = path.join(os.tmpdir(), 'ppppp-dict')
const DIR = path.join(os.tmpdir(), 'pzp-dict')
rimraf.sync(DIR)

const aliceKeypair = Keypair.generate('ed25519', 'alice')

function getMsgID(peer, index, domain) {
async function getMsgID(peer, index, domain) {
let i = 0
for (const rec of peer.db.records()) {
for await (const rec of peer.db.records()) {
if (rec.msg.metadata.domain === domain && !!rec.msg.data) {
if (i === index) return rec.id
i++
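With the db and dict APIs now callback-based, the tests wrap each call in `util.promisify` (the `p` helper above) so they can keep using `await`. A small stand-alone sketch of the pattern; the `dict` object here is a stand-in for `peer.dict`:

```js
const p = require('node:util').promisify

// Stand-in for the callback-based API the tests exercise
const dict = {
  read(id, subdomain, cb) {
    setImmediate(() => cb(null, { name: 'alice' }))
  },
}

async function main() {
  // Mirrors the `await p(peer.dict.read)(aliceID, 'profile')` calls in the tests
  const profile = await p(dict.read)('some-account-id', 'profile')
  console.log(profile) // { name: 'alice' }
}

main()
```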
@ -53,16 +53,16 @@ test('Dict update() and get()', async (t) => {
await p(peer.dict.update)('profile', { name: 'alice' }),
'update .name'
)
const UPDATE0_ID = getMsgID(peer, 0, 'dict_v1__profile')
assert.deepEqual(peer.dict.read(aliceID, 'profile'), { name: 'alice' }, 'get')
const UPDATE0_ID = await getMsgID(peer, 0, 'dict_v1__profile')
assert.deepEqual(await p(peer.dict.read)(aliceID, 'profile'), { name: 'alice' }, 'get')

const fieldRoots1 = peer.dict._getFieldRoots('profile')
assert.deepEqual(fieldRoots1, { name: [UPDATE0_ID] }, 'fieldRoots')

assert(await p(peer.dict.update)('profile', { age: 20 }), 'update .age')
const UPDATE1_ID = getMsgID(peer, 1, 'dict_v1__profile')
const UPDATE1_ID = await getMsgID(peer, 1, 'dict_v1__profile')
assert.deepEqual(
peer.dict.read(aliceID, 'profile'),
await p(peer.dict.read)(aliceID, 'profile'),
{ name: 'alice', age: 20 },
'get'
)
@ -80,7 +80,7 @@ test('Dict update() and get()', async (t) => {
'redundant update .name'
)
assert.deepEqual(
peer.dict.read(aliceID, 'profile'),
await p(peer.dict.read)(aliceID, 'profile'),
{ name: 'alice', age: 20 },
'get'
)
@ -90,9 +90,9 @@ test('Dict update() and get()', async (t) => {
true,
'update .name'
)
const UPDATE2_ID = getMsgID(peer, 2, 'dict_v1__profile')
const UPDATE2_ID = await getMsgID(peer, 2, 'dict_v1__profile')
assert.deepEqual(
peer.dict.read(aliceID, 'profile'),
await p(peer.dict.read)(aliceID, 'profile'),
{ name: 'Alice', age: 20 },
'get'
)
@ -109,8 +109,8 @@ test('Dict squeeze', async (t) => {
assert(await p(peer.dict.update)('profile', { age: 21 }), 'update .age')
assert(await p(peer.dict.update)('profile', { age: 22 }), 'update .age')
assert(await p(peer.dict.update)('profile', { age: 23 }), 'update .age')
const UPDATE2_ID = getMsgID(peer, 2, 'dict_v1__profile')
const UPDATE5_ID = getMsgID(peer, 5, 'dict_v1__profile')
const UPDATE2_ID = await getMsgID(peer, 2, 'dict_v1__profile')
const UPDATE5_ID = await getMsgID(peer, 5, 'dict_v1__profile')

const fieldRoots4 = peer.dict._getFieldRoots('profile')
assert.deepEqual(
@ -119,9 +119,9 @@ test('Dict squeeze', async (t) => {
'fieldRoots'
)

assert.equal(peer.dict._squeezePotential('profile'), 3, 'squeezePotential=3')
assert.equal(await p(peer.dict._squeezePotential)('profile'), 3, 'squeezePotential=3')
assert.equal(await p(peer.dict.squeeze)('profile'), true, 'squeezed')
const UPDATE6_ID = getMsgID(peer, 6, 'dict_v1__profile')
const UPDATE6_ID = await getMsgID(peer, 6, 'dict_v1__profile')

const fieldRoots5 = peer.dict._getFieldRoots('profile')
assert.deepEqual(
@ -130,7 +130,7 @@ test('Dict squeeze', async (t) => {
'fieldRoots'
)

assert.equal(peer.dict._squeezePotential('profile'), 0, 'squeezePotential=0')
assert.equal(await p(peer.dict._squeezePotential)('profile'), 0, 'squeezePotential=0')
assert.equal(
await p(peer.dict.squeeze)('profile'),
false,
@ -141,13 +141,13 @@ test('Dict squeeze', async (t) => {
assert.deepEqual(fieldRoots6, fieldRoots5, 'fieldRoots')
})

test('Dict isGhostable', (t) => {
test('Dict isGhostable', async (t) => {
const moot = MsgV4.createMoot(aliceID, 'dict_v1__profile', aliceKeypair)
const mootID = MsgV4.getMsgID(moot)

assert.equal(mootID, peer.dict.getFeedID('profile'), 'getFeedID')

const tangle = peer.db.getTangle(mootID)
const tangle = await p(peer.db.getTangle)(mootID)
const msgIDs = tangle.topoSort()

const fieldRoots = peer.dict._getFieldRoots('profile')
@ -155,23 +155,23 @@ test('Dict isGhostable', (t) => {

// Remember from the setup, that ghostSpan=4
assert.equal(msgIDs.length, 8)
assert.equal(peer.dict.isGhostable(msgIDs[0], mootID), false) // moot
assert.equal(peer.dict.isGhostable(msgIDs[1], mootID), false)
assert.equal(peer.dict.isGhostable(msgIDs[2], mootID), false)
assert.equal(peer.dict.isGhostable(msgIDs[3], mootID), true) // in ghostSpan
assert.equal(peer.dict.isGhostable(msgIDs[4], mootID), true) // in ghostSpan
assert.equal(peer.dict.isGhostable(msgIDs[5], mootID), true) // in ghostSpan
assert.equal(peer.dict.isGhostable(msgIDs[6], mootID), true) // in ghostSpan
assert.equal(peer.dict.isGhostable(msgIDs[7], mootID), false) // field root
assert.equal(await p(peer.dict.isGhostable)(msgIDs[0], mootID), false) // moot
assert.equal(await p(peer.dict.isGhostable)(msgIDs[1], mootID), false)
assert.equal(await p(peer.dict.isGhostable)(msgIDs[2], mootID), false)
assert.equal(await p(peer.dict.isGhostable)(msgIDs[3], mootID), true) // in ghostSpan
assert.equal(await p(peer.dict.isGhostable)(msgIDs[4], mootID), true) // in ghostSpan
assert.equal(await p(peer.dict.isGhostable)(msgIDs[5], mootID), true) // in ghostSpan
assert.equal(await p(peer.dict.isGhostable)(msgIDs[6], mootID), true) // in ghostSpan
assert.equal(await p(peer.dict.isGhostable)(msgIDs[7], mootID), false) // field root
})

test('Dict receives old branched update', async (t) => {
const UPDATE6_ID = getMsgID(peer, 6, 'dict_v1__profile')
const UPDATE6_ID = await getMsgID(peer, 6, 'dict_v1__profile')

const moot = MsgV4.createMoot(aliceID, 'dict_v1__profile', aliceKeypair)
const mootID = MsgV4.getMsgID(moot)

assert.equal(peer.dict.minRequiredDepth(mootID), 7, 'minRequiredDepth')
assert.equal(await p(peer.dict.minRequiredDepth)(mootID), 7, 'minRequiredDepth')

const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
@ -199,9 +199,9 @@ test('Dict receives old branched update', async (t) => {
'fieldRoots'
)

assert.equal(peer.dict.minRequiredDepth(mootID), 1, 'minRequiredDepth')
assert.equal(await p(peer.dict.minRequiredDepth)(mootID), 1, 'minRequiredDepth')

assert.equal(peer.dict._squeezePotential('profile'), 6, 'squeezePotential=6')
assert.equal(await p(peer.dict._squeezePotential)('profile'), 6, 'squeezePotential=6')
})

test('teardown', async (t) => {
@ -1,15 +1,15 @@
const OS = require('node:os')
const Path = require('node:path')
const rimraf = require('rimraf')
const caps = require('ppppp-caps')
const Keypair = require('ppppp-keypair')
const caps = require('pzp-caps')
const Keypair = require('pzp-keypair')

function createPeer(config) {
if (config.name) {
const name = config.name
const tmp = OS.tmpdir()
config.global ??= {}
config.global.path ??= Path.join(tmp, `ppppp-dict-${name}-${Date.now()}`)
config.global.path ??= Path.join(tmp, `pzp-dict-${name}-${Date.now()}`)
config.global.keypair ??= Keypair.generate('ed25519', name)
delete config.name
}
@ -27,7 +27,7 @@ function createPeer(config) {
return require('secret-stack/bare')()
.use(require('secret-stack/plugins/net'))
.use(require('secret-handshake-ext/secret-stack'))
.use(require('ppppp-db'))
.use(require('pzp-db'))
.use(require('ssb-box'))
.use(require('../lib'))
.call(null, {
@ -1,13 +1,22 @@
{
  "include": ["lib/**/*.js"],
  "exclude": ["coverage/", "node_modules/", "test/"],
  "include": [
    "lib/**/*.js"
  ],
  "exclude": [
    "coverage/",
    "node_modules/",
    "test/"
  ],
  "compilerOptions": {
    "checkJs": true,
    "declaration": true,
    "emitDeclarationOnly": true,
    "exactOptionalPropertyTypes": true,
    "forceConsistentCasingInFileNames": true,
    "lib": ["es2022", "dom"],
    "lib": [
      "es2022",
      "dom"
    ],
    "module": "node16",
    "skipLibCheck": true,
    "strict": true,