mirror of https://codeberg.org/pzp/pzp-set.git
update to use msg-v3
This commit is contained in:
parent ccf1b73cdd
commit 69f5ce52c3

lib/index.js (498 lines changed)
@@ -1,79 +1,149 @@
const FeedV1 = require('ppppp-db/feed-v1')
const MsgV3 = require('ppppp-db/msg-v3')

const PREFIX = 'set_v1__'

/** @typedef {string} Subtype */

/** @typedef {string} MsgHash */

/** @typedef {`${Subtype}.${string}`} SubtypeItem */
/**
 * @typedef {ReturnType<import('ppppp-db').init>} PPPPPDB
 * @typedef {import('ppppp-db').RecPresent} RecPresent
 * @typedef {{
 *   hook: (
 *     cb: (
 *       this: any,
 *       fn: (this: any, ...a: Array<any>) => any,
 *       args: Array<any>
 *     ) => void
 *   ) => void
 * }} ClosableHook
 * @typedef {string} Subdomain
 * @typedef {string} MsgID
 * @typedef {`${Subdomain}/${string}`} SubdomainItem
 * @typedef {{
 *   add: Array<string>,
 *   del: Array<string>,
 *   supersedes: Array<MsgID>,
 * }} SetData
 */

/**
 * @param {string} type
 * @returns {Subtype}
 * @template [T = any]
 * @typedef {import('ppppp-db/msg-v3').Msg<T>} Msg<T>
 */
function toSubtype(type) {
  return type.slice(PREFIX.length)

/**
 * @template T
 * @typedef {T extends void ?
 *   (...args: [Error] | []) => void :
 *   (...args: [Error] | [null, T]) => void
 * } CB
 */

/**
 * @param {string} domain
 * @returns {Subdomain}
 */
function toSubdomain(domain) {
  return domain.slice(PREFIX.length)
}

/**
 * @param {Subtype} subtype
 * @param {Subdomain} subdomain
 * @returns {string}
 */
function fromSubtype(subtype) {
  return PREFIX + subtype
function fromSubdomain(subdomain) {
  return PREFIX + subdomain
}

/**
 * @param {{
 *   db: PPPPPDB | null,
 *   close: ClosableHook,
 * }} peer
 * @returns {asserts peer is { db: PPPPPDB, close: ClosableHook }}
 */
function assertDBExists(peer) {
  if (!peer.db) throw new Error('record plugin requires ppppp-db plugin')
}

/**
 * @param {unknown} check
 * @param {string} message
 * @returns {asserts check}
 */
function assert(check, message) {
  if (!check) throw new Error(message)
}

module.exports = {
  name: 'set',
  manifest: {
    add: 'async',
    del: 'async',
    has: 'sync',
    values: 'sync',
    getItemRoots: 'sync',
    squeeze: 'async',
  },
  init(peer, config) {
    //#region state
    const myWho = FeedV1.stripAuthor(config.keys.id)
    let cancelListeningToRecordAdded = null
  manifest: {},

    /** @type {Map<Subtype, unknown>} */
    const tangles = new Map()
  /**
   * @param {{ db: PPPPPDB | null, close: ClosableHook }} peer
   * @param {any} config
   */
  init(peer, config) {
    assertDBExists(peer)

    //#region state
    let accountID = /** @type {string | null} */ (null)
    let loadPromise = /** @type {Promise<void> | null} */ (null)
    let cancelOnRecordAdded = /** @type {CallableFunction | null} */ (null)
    const tangles = /** @type {Map<Subdomain, MsgV3.Tangle>} */ (new Map())

    const itemRoots = {
      /** @type {Map<SubtypeItem, Set<MsgHash>} */
      _map: new Map(),
      _getKey(subtype, item) {
        return subtype + '/' + item
      _map: /** @type {Map<SubdomainItem, Set<MsgID>>} */ (new Map()),
      /**
       * @param {string} subdomain
       * @param {string} item
       * @returns {SubdomainItem}
       */
      _getKey(subdomain, item) {
        return `${subdomain}/${item}`
      },
      get(subtype, item = null) {
        if (item) {
          const key = this._getKey(subtype, item)
          return this._map.get(key)
        } else {
          const out = {}
          for (const [key, value] of this._map.entries()) {
            if (key.startsWith(subtype + '/')) {
              const item = key.slice(subtype.length + 1)
              out[item] = [...value]
            }
      /**
       * @param {string} subdomain
       * @returns {Record<string, Array<MsgID>>}
       */
      getAll(subdomain) {
        const out = /** @type {Record<string, Array<MsgID>>} */ ({})
        for (const [key, value] of this._map.entries()) {
          if (key.startsWith(subdomain + '/')) {
            const item = key.slice(subdomain.length + 1)
            out[item] = [...value]
          }
          return out
        }
        return out
      },
      add(subtype, item, msgHash) {
        const key = this._getKey(subtype, item)
      /**
       * @param {string} subdomain
       * @param {string} item
       * @returns {Set<MsgID> | undefined}
       */
      get(subdomain, item) {
        const key = this._getKey(subdomain, item)
        return this._map.get(key)
      },
      /**
       * @param {string} subdomain
       * @param {string} item
       * @param {string} msgID
       */
      add(subdomain, item, msgID) {
        const key = this._getKey(subdomain, item)
        const set = this._map.get(key) ?? new Set()
        set.add(msgHash)
        set.add(msgID)
        return this._map.set(key, set)
      },
      del(subtype, item, msgHash) {
        const key = this._getKey(subtype, item)
      /**
       * @param {string} subdomain
       * @param {string} item
       * @param {string} msgID
       */
      del(subdomain, item, msgID) {
        const key = this._getKey(subdomain, item)
        const set = this._map.get(key)
        if (!set) return false
        set.delete(msgHash)
        set.delete(msgID)
        if (set.size === 0) this._map.delete(key)
        return true
      },
@@ -84,53 +154,58 @@ module.exports = {
    //#endregion

    //#region active processes
    const loadPromise = new Promise((resolve, reject) => {
      for (const { hash, msg } of peer.db.records()) {
        maybeLearnAboutSet(hash, msg)
      }
      cancelListeningToRecordAdded = peer.db.onRecordAdded(({ hash, msg }) => {
        maybeLearnAboutSet(hash, msg)
      })
      resolve()
    })

    peer.close.hook(function (fn, args) {
      cancelListeningToRecordAdded()
      cancelOnRecordAdded?.()
      fn.apply(this, args)
    })
    //#endregion

    //#region internal methods
    function isValidSetRootMsg(msg) {
    /**
     * @private
     * @param {Msg | null | undefined} msg
     * @returns {msg is Msg}
     */
    function isValidSetMoot(msg) {
      if (!msg) return false
      if (msg.metadata.who !== myWho) return false
      const type = msg.metadata.type
      if (!type.startsWith(PREFIX)) return false
      return FeedV1.isFeedRoot(msg, config.keys.id, type)
      if (msg.metadata.account !== accountID) return false
      const domain = msg.metadata.domain
      if (!domain.startsWith(PREFIX)) return false
      return MsgV3.isMoot(msg, accountID, domain)
    }

    /**
     * @private
     * @param {Msg | null | undefined} msg
     * @returns {msg is Msg<SetData>}
     */
    function isValidSetMsg(msg) {
      if (!msg) return false
      if (!msg.content) return false
      if (msg.metadata.who !== myWho) return false
      if (!msg.metadata.type.startsWith(PREFIX)) return false
      if (!Array.isArray(msg.content.add)) return false
      if (!Array.isArray(msg.content.del)) return false
      if (!Array.isArray(msg.content.supersedes)) return false
      if (!msg.data) return false
      if (msg.metadata.account !== accountID) return false
      if (!msg.metadata.domain.startsWith(PREFIX)) return false
      if (!Array.isArray(msg.data.add)) return false
      if (!Array.isArray(msg.data.del)) return false
      if (!Array.isArray(msg.data.supersedes)) return false
      return true
    }

    function readSet(authorId, subtype) {
      const type = fromSubtype(subtype)
      const rootHash = FeedV1.getFeedRootHash(authorId, type)
      const tangle = peer.db.getTangle(rootHash)
      if (!tangle || tangle.size() === 0) return new Set()
      const msgHashes = tangle.topoSort()
    /**
     * @param {string} id
     * @param {string} subdomain
     */
    function readSet(id, subdomain) {
      assertDBExists(peer)
      const domain = fromSubdomain(subdomain)
      const mootID = MsgV3.getMootID(id, domain)
      const tangle = peer.db.getTangle(mootID)
      if (!tangle || tangle.size === 0) return new Set()
      const msgIDs = tangle.topoSort()
      const set = new Set()
      for (const msgHash of msgHashes) {
        const msg = peer.db.get(msgHash)
      for (const msgID of msgIDs) {
        const msg = peer.db.get(msgID)
        if (isValidSetMsg(msg)) {
          const { add, del } = msg.content
          const { add, del } = msg.data
          for (const value of add) set.add(value)
          for (const value of del) set.delete(value)
        }
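For orientation, the reduction that readSet performs can be pictured with this standalone sketch (not part of the diff; the ops array is hypothetical): replay topologically sorted add/del updates into a plain Set, so later updates win.

```js
// Minimal sketch of readSet's reduction step, assuming the set-update
// messages' data ({add, del}) are already in topological (causal) order.
function reduceSetOps(opsInTopoOrder) {
  const set = new Set()
  for (const { add = [], del = [] } of opsInTopoOrder) {
    for (const value of add) set.add(value)
    for (const value of del) set.delete(value)
  }
  return set
}

// Hypothetical usage:
// reduceSetOps([{ add: ['1st'] }, { add: ['2nd'] }, { del: ['1st'] }])
// => Set { '2nd' }
```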
@@ -138,67 +213,88 @@ module.exports = {
      return set
    }

    function learnSetRoot(hash, msg) {
      const { type } = msg.metadata
      const subtype = toSubtype(type)
      const tangle = tangles.get(subtype) ?? new FeedV1.Tangle(hash)
      tangle.add(hash, msg)
      tangles.set(subtype, tangle)
    /**
     * @param {string} mootID
     * @param {Msg} moot
     */
    function learnSetMoot(mootID, moot) {
      const { domain } = moot.metadata
      const subdomain = toSubdomain(domain)
      const tangle = tangles.get(subdomain) ?? new MsgV3.Tangle(mootID)
      tangle.add(mootID, moot)
      tangles.set(subdomain, tangle)
    }

    function learnSetUpdate(hash, msg) {
      const { who, type } = msg.metadata
      const rootHash = FeedV1.getFeedRootHash(who, type)
      const subtype = toSubtype(type)
      const tangle = tangles.get(subtype) ?? new FeedV1.Tangle(rootHash)
      tangle.add(hash, msg)
      tangles.set(subtype, tangle)
      const addOrRemove = [].concat(msg.content.add, msg.content.del)
      for (const item of addOrRemove) {
        const existing = itemRoots.get(subtype, item)
    /**
     * @param {string} msgID
     * @param {Msg<SetData>} msg
     */
    function learnSetUpdate(msgID, msg) {
      const { account, domain } = msg.metadata
      const mootID = MsgV3.getMootID(account, domain)
      const subdomain = toSubdomain(domain)
      const tangle = tangles.get(subdomain) ?? new MsgV3.Tangle(mootID)
      tangle.add(msgID, msg)
      tangles.set(subdomain, tangle)
      const addOrDel = msg.data.add.concat(msg.data.del)
      for (const item of addOrDel) {
        const existing = itemRoots.get(subdomain, item)
        if (!existing || existing.size === 0) {
          itemRoots.add(subtype, item, hash)
          itemRoots.add(subdomain, item, msgID)
        } else {
          for (const existingHash of existing) {
            if (tangle.precedes(existingHash, hash)) {
              itemRoots.del(subtype, item, existingHash)
              itemRoots.add(subtype, item, hash)
          for (const existingID of existing) {
            if (tangle.precedes(existingID, msgID)) {
              itemRoots.del(subdomain, item, existingID)
              itemRoots.add(subdomain, item, msgID)
            } else {
              itemRoots.add(subtype, item, hash)
              itemRoots.add(subdomain, item, msgID)
            }
          }
        }
      }
    }

    function maybeLearnAboutSet(hash, msg) {
      if (msg.metadata.who !== myWho) return
      if (isValidSetRootMsg(msg)) {
        learnSetRoot(hash, msg)
    /**
     * @param {string} msgID
     * @param {Msg} msg
     */
    function maybeLearnAboutSet(msgID, msg) {
      if (msg.metadata.account !== accountID) return
      if (isValidSetMoot(msg)) {
        learnSetMoot(msgID, msg)
        return
      }
      if (isValidSetMsg(msg)) {
        learnSetUpdate(hash, msg)
        learnSetUpdate(msgID, msg)
        return
      }
    }

    /**
     * @private
     * @param {CB<void>} cb
     */
    function loaded(cb) {
      if (cb === void 0) return loadPromise
      else loadPromise.then(() => cb(null), cb)
      else loadPromise?.then(() => cb(), cb)
    }

    function _squeezePotential(subtype) {
    /**
     * @param {string} subdomain
     */
    function _squeezePotential(subdomain) {
      assertDBExists(peer)
      if (!accountID) throw new Error('Cannot squeeze potential before loading')
      // TODO: improve this so that the squeezePotential is the size of the
      // tangle suffix built as a slice from the fieldRoots
      const rootHash = FeedV1.getFeedRootHash(myWho, fromSubtype(subtype))
      const tangle = peer.db.getTangle(rootHash)
      const maxDepth = tangle.getMaxDepth()
      const currentItemRoots = itemRoots.get(subtype)
      const mootID = MsgV3.getMootID(accountID, fromSubdomain(subdomain))
      const tangle = peer.db.getTangle(mootID)
      const maxDepth = tangle.maxDepth
      const currentItemRoots = itemRoots.getAll(subdomain)
      let minDepth = Infinity
      for (const item in currentItemRoots) {
        for (const msgHash of currentItemRoots[item]) {
          const depth = tangle.getDepth(msgHash)
        for (const msgID of currentItemRoots[item]) {
          const depth = tangle.getDepth(msgID)
          if (depth < minDepth) minDepth = depth
        }
      }
@@ -207,117 +303,191 @@ module.exports = {
    //#endregion

    //#region public methods
    function add(authorId, subtype, value, cb) {
      const who = FeedV1.stripAuthor(authorId)
    /**
     * @param {string} id
     * @param {CB<void>} cb
     */
    function load(id, cb) {
      assertDBExists(peer)
      accountID = id
      loadPromise = new Promise((resolve, reject) => {
        for (const rec of peer.db.records()) {
          if (!rec.msg) continue
          maybeLearnAboutSet(rec.id, rec.msg)
        }
        cancelOnRecordAdded = peer.db.onRecordAdded(
          (/** @type {RecPresent} */ rec) => {
            try {
              maybeLearnAboutSet(rec.id, rec.msg)
            } catch (err) {
              console.error(err)
            }
          }
        )
        resolve()
        cb()
      })
    }

    /**
     * @param {string} id
     * @param {string} subdomain
     * @param {string} value
     * @param {CB<boolean>} cb
     */
    function add(id, subdomain, value, cb) {
      assertDBExists(peer)
      assert(!!accountID, 'Cannot add to Set before loading')
      // prettier-ignore
      if (who !== myWho) return cb(new Error(`Cannot add to another user's Set (${authorId}/${subtype})`))
      if (id !== accountID) return cb(new Error(`Cannot add to another user's Set (${id}/${subdomain})`))

      loaded(() => {
        const currentSet = readSet(authorId, subtype)
        assert(!!accountID, 'Cannot add to Set before loading')
        const currentSet = readSet(id, subdomain)
        if (currentSet.has(value)) return cb(null, false)
        const type = fromSubtype(subtype)
        const domain = fromSubdomain(subdomain)

        // Populate supersedes
        const supersedes = []
        const toDeleteFromItemRoots = new Map()
        const currentItemRoots = itemRoots.get(subtype)
        const currentItemRoots = itemRoots.getAll(subdomain)
        for (const item in currentItemRoots) {
          // If we are re-adding this item, OR if this item has been deleted,
          // then we should update roots
          if (item === value || !currentSet.has(item)) {
            supersedes.push(...currentItemRoots[item])
            for (const msgHash of currentItemRoots[item]) {
              toDeleteFromItemRoots.set(msgHash, item)
            for (const msgID of currentItemRoots[item]) {
              toDeleteFromItemRoots.set(msgID, item)
            }
          }
        }

        const content = { add: [value], del: [], supersedes }
        peer.db.create({ type, content }, (err) => {
          // prettier-ignore
          if (err) return cb(new Error(`Failed to create msg when adding to Set (${authorId}/${subtype})`, { cause: err }))
          for (const [msgHash, item] of toDeleteFromItemRoots) {
            itemRoots.del(subtype, item, msgHash)
        const data = { add: [value], del: [], supersedes }
        peer.db.feed.publish(
          { account: accountID, domain, data },
          (err, rec) => {
            // prettier-ignore
            if (err) return cb(new Error(`Failed to create msg when adding to Set (${id}/${subdomain})`, { cause: err }))
            for (const [msgID, item] of toDeleteFromItemRoots) {
              itemRoots.del(subdomain, item, msgID)
            }
            // @ts-ignore
            cb(null, true)
          }
          cb(null, true)
        })
        )
      })
    }

    function del(authorId, subtype, value, cb) {
      const who = FeedV1.stripAuthor(authorId)
    /**
     * @param {string} id
     * @param {string} subdomain
     * @param {string} value
     * @param {CB<boolean>} cb
     */
    function del(id, subdomain, value, cb) {
      assertDBExists(peer)
      assert(!!accountID, 'Cannot add to Set before loading')
      // prettier-ignore
      if (who !== myWho) return cb(new Error(`Cannot delete from another user's Set (${authorId}/${subtype})`))
      if (id !== accountID) return cb(new Error(`Cannot delete from another user's Set (${id}/${subdomain})`))

      loaded(() => {
        const currentSet = readSet(authorId, subtype)
        assert(!!accountID, 'Cannot add to Set before loading')
        const currentSet = readSet(id, subdomain)
        if (!currentSet.has(value)) return cb(null, false)
        const type = fromSubtype(subtype)
        const domain = fromSubdomain(subdomain)

        // Populate supersedes
        const supersedes = []
        const currentItemRoots = itemRoots.get(subtype)
        const currentItemRoots = itemRoots.getAll(subdomain)
        for (const item in currentItemRoots) {
          if (item === value || !currentSet.has(item)) {
            supersedes.push(...currentItemRoots[item])
          }
        }

        const content = { add: [], del: [value], supersedes }
        peer.db.create({ type, content }, (err) => {
          // prettier-ignore
          if (err) return cb(new Error(`Failed to create msg when deleting from Set (${authorId}/${subtype})`, { cause: err }))
          cb(null, true)
        })
        const data = { add: [], del: [value], supersedes }
        peer.db.feed.publish(
          { account: accountID, domain, data },
          (err, rec) => {
            // prettier-ignore
            if (err) return cb(new Error(`Failed to create msg when deleting from Set (${id}/${subdomain})`, { cause: err }))
            // @ts-ignore
            cb(null, true)
          }
        )
      })
    }

    function has(authorId, subtype, value) {
      const set = readSet(authorId, subtype)
    /**
     * @param {string} id
     * @param {string} subdomain
     * @param {any} value
     */
    function has(id, subdomain, value) {
      const set = readSet(id, subdomain)
      return set.has(value)
    }

    function values(authorId, subtype) {
      const set = readSet(authorId, subtype)
    /**
     * @param {string} id
     * @param {string} subdomain
     */
    function values(id, subdomain) {
      const set = readSet(id, subdomain)
      return [...set]
    }

    function getItemRoots(authorId, subtype) {
      const who = FeedV1.stripAuthor(authorId)
    /**
     * @param {string} id
     * @param {any} subdomain
     */
    function getItemRoots(id, subdomain) {
      // prettier-ignore
      if (who !== myWho) return cb(new Error(`Cannot getItemRoots of another user's Set. (${authorId}/${subtype})`))
      return itemRoots.get(subtype)
      if (id !== accountID) throw new Error(`Cannot getItemRoots of another user's Set. (${id}/${subdomain})`)
      return itemRoots.getAll(subdomain)
    }

    function squeeze(authorId, subtype, cb) {
      const who = FeedV1.stripAuthor(authorId)
    /**
     * @param {string} id
     * @param {string} subdomain
     * @param {CB<boolean>} cb
     */
    function squeeze(id, subdomain, cb) {
      assertDBExists(peer)
      assert(!!accountID, 'Cannot squeeze Set before loading')
      // prettier-ignore
      if (who !== myWho) return cb(new Error(`Cannot squeeze another user's Set (${authorId}/${subtype})`))
      if (id !== accountID) return cb(new Error(`Cannot squeeze another user's Set (${id}/${subdomain})`))

      const potential = _squeezePotential(subtype)
      const potential = _squeezePotential(subdomain)
      if (potential < 1) return cb(null, false)

      loaded(() => {
        const type = fromSubtype(subtype)
        const currentSet = readSet(authorId, subtype)
        assert(!!accountID, 'Cannot squeeze Set before loading')
        const domain = fromSubdomain(subdomain)
        const currentSet = readSet(id, subdomain)

        const supersedes = []
        const currentItemRoots = itemRoots.get(subtype)
        const currentItemRoots = itemRoots.getAll(subdomain)
        for (const item in currentItemRoots) {
          supersedes.push(...currentItemRoots[item])
        }

        const content = { add: [...currentSet], del: [], supersedes }
        peer.db.create({ type, content }, (err) => {
          // prettier-ignore
          if (err) return cb(new Error(`Failed to create msg when squeezing Set (${authorId}/${subtype})`, { cause: err }))
          cb(null, true)
        })
        const data = { add: [...currentSet], del: [], supersedes }
        peer.db.feed.publish(
          { account: accountID, domain, data },
          (err, rec) => {
            // prettier-ignore
            if (err) return cb(new Error(`Failed to create msg when squeezing Set (${id}/${subdomain})`, { cause: err }))
            // @ts-ignore
            cb(null, true)
          }
        )
      })
    }
    //#endregion

    return {
      load,
      add,
      del,
      has,
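Taken together with test/index.js further down in this diff, the public surface returned here is used roughly as in the following sketch (illustrative only; `aliceID` and the 'follows' subdomain are example names taken from the tests):

```js
// Sketch of the msg-v3 era API flow, mirroring test/index.js below.
const p = require('node:util').promisify

async function exampleUsage(peer) {
  await peer.db.loaded()
  const aliceID = await p(peer.db.account.create)({ domain: 'account', _nonce: 'alice' })
  await p(peer.set.load)(aliceID)          // point the Set plugin at this account

  await p(peer.set.add)(aliceID, 'follows', '1st')
  peer.set.has(aliceID, 'follows', '1st')  // => true
  await p(peer.set.del)(aliceID, 'follows', '1st')
  peer.set.values(aliceID, 'follows')      // => []
  await p(peer.set.squeeze)(aliceID, 'follows')
}
```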
package.json (19 lines changed)

@@ -28,19 +28,20 @@
  "devDependencies": {
    "bs58": "^5.0.0",
    "c8": "7",
    "ppppp-db": "github:staltz/ppppp-db#rev1",
    "ppppp-db": "github:staltz/ppppp-db",
    "ppppp-caps": "github:staltz/ppppp-caps",
    "ppppp-keypair": "github:staltz/ppppp-keypair",
    "rimraf": "^4.4.0",
    "secret-stack": "^6.4.1",
    "secret-stack": "~7.1.0",
    "secret-handshake-ext": "^0.0.8",
    "ssb-box": "^1.0.1",
    "ssb-caps": "^1.1.0",
    "ssb-classic": "^1.1.0",
    "ssb-keys": "^8.5.0",
    "ssb-uri2": "^2.4.1",
    "tap-arc": "^0.3.5",
    "tape": "^5.6.3"
    "typescript": "^5.1.3"
  },
  "scripts": {
    "test": "tape test/*.js | tap-arc --bail",
    "clean-check": "tsc --build --clean",
    "prepublishOnly": "npm run clean-check && tsc --build",
    "postpublish": "npm run clean-check",
    "test": "npm run clean-check && node --test",
    "format-code": "prettier --write \"(lib|test)/**/*.js\"",
    "format-code-staged": "pretty-quick --staged --pattern \"(lib|test)/**/*.js\"",
    "coverage": "c8 --reporter=lcov npm run test"
@@ -30,10 +30,10 @@ Reducing the tangle above in a topological sort allows you to build an array
`msg.content` format:

```typescript
interface MsgContent {
interface MsgData {
  add: Array<string>,
  del: Array<string>,
  supersedes: Array<MsgHash>,
  supersedes: Array<MsgID>,
}
```
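As a hedged illustration of that shape (the values and message IDs below are made up, not taken from the repository), one set-update message's data could look like:

```js
// Hypothetical example of a single set-update msg's data under msg-v3:
const exampleData = {
  add: ['3rd'],                         // values added to the set
  del: ['1st'],                         // values removed from the set
  supersedes: ['XmZpTQ9c', 'Qe4yUj2b'], // fake msg IDs made redundant by this update
}
```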
@@ -1,103 +1,101 @@
const test = require('tape')
const path = require('path')
const os = require('os')
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const rimraf = require('rimraf')
const SecretStack = require('secret-stack')
const FeedV1 = require('ppppp-db/feed-v1')
const caps = require('ssb-caps')
const p = require('util').promisify
const { generateKeypair } = require('./util')
const p = require('node:util').promisify
const { createPeer } = require('./util')
const Keypair = require('ppppp-keypair')

const DIR = path.join(os.tmpdir(), 'ppppp-set')
rimraf.sync(DIR)

const aliceKeys = generateKeypair('alice')
const who = aliceKeys.id
const aliceKeypair = Keypair.generate('ed25519', 'alice')

let peer
let aliceID
test('setup', async (t) => {
  peer = SecretStack({ appKey: caps.shs })
    .use(require('ppppp-db'))
    .use(require('ssb-box'))
    .use(require('../lib'))
    .call(null, {
      keys: aliceKeys,
      path: DIR,
    })
  peer = createPeer({ keypair: aliceKeypair, path: DIR })

  await peer.db.loaded()

  aliceID = await p(peer.db.account.create)({
    domain: 'account',
    _nonce: 'alice',
  })
  await p(peer.set.load)(aliceID)
})

function lastMsgHash() {
function lastMsgID() {
  let last
  for (const item of peer.db.records()) {
    last = item
  }
  return last.hash
  return last.id
}

let add1, add2, del1, add3, del2
test('Set add(), del(), has()', async (t) => {
  // Add 1st
  t.false(peer.set.has(who, 'follows', '1st'), 'doesnt have 1st')
  t.ok(await p(peer.set.add)(who, 'follows', '1st'), 'add 1st')
  t.true(peer.set.has(who, 'follows', '1st'), 'has 1st')
  add1 = lastMsgHash()
  t.deepEquals(
    peer.set.getItemRoots(who, 'follows'),
  assert.equal(peer.set.has(aliceID, 'follows', '1st'), false, 'doesnt have 1st')
  assert(await p(peer.set.add)(aliceID, 'follows', '1st'), 'add 1st')
  assert.equal(peer.set.has(aliceID, 'follows', '1st'), true, 'has 1st')
  add1 = lastMsgID()
  assert.deepEqual(
    peer.set.getItemRoots(aliceID, 'follows'),
    { '1st': [add1] },
    'itemRoots'
  )

  // Add 2nd
  t.false(peer.set.has(who, 'follows', '2nd'), 'doesnt have 2nd')
  t.ok(await p(peer.set.add)(who, 'follows', '2nd'), 'add 2nd')
  t.true(peer.set.has(who, 'follows', '2nd'), 'has 2nd')
  add2 = lastMsgHash()
  t.deepEquals(
    peer.set.getItemRoots(who, 'follows'),
  assert.equal(peer.set.has(aliceID, 'follows', '2nd'), false, 'doesnt have 2nd')
  assert(await p(peer.set.add)(aliceID, 'follows', '2nd'), 'add 2nd')
  assert.equal(peer.set.has(aliceID, 'follows', '2nd'), true, 'has 2nd')
  add2 = lastMsgID()
  assert.deepEqual(
    peer.set.getItemRoots(aliceID, 'follows'),
    { '1st': [add1], '2nd': [add2] },
    'itemRoots'
  )

  // Del 1st
  t.true(peer.set.has(who, 'follows', '1st'), 'has 1st')
  t.ok(await p(peer.set.del)(who, 'follows', '1st'), 'del 1st')
  t.false(peer.set.has(who, 'follows', '1st'), 'doesnt have 1st')
  del1 = lastMsgHash()
  t.deepEquals(
    peer.set.getItemRoots(who, 'follows'),
  assert.equal(peer.set.has(aliceID, 'follows', '1st'), true, 'has 1st')
  assert(await p(peer.set.del)(aliceID, 'follows', '1st'), 'del 1st')
  assert.equal(peer.set.has(aliceID, 'follows', '1st'), false, 'doesnt have 1st')
  del1 = lastMsgID()
  assert.deepEqual(
    peer.set.getItemRoots(aliceID, 'follows'),
    { '1st': [del1], '2nd': [add2] },
    'itemRoots'
  )

  // Add 3rd
  t.false(peer.set.has(who, 'follows', '3rd'), 'doesnt have 3rd')
  t.ok(await p(peer.set.add)(who, 'follows', '3rd'), 'add 3rd')
  t.true(peer.set.has(who, 'follows', '3rd'), 'has 3rd')
  add3 = lastMsgHash()
  t.deepEquals(
    peer.set.getItemRoots(who, 'follows'),
  assert.equal(peer.set.has(aliceID, 'follows', '3rd'), false, 'doesnt have 3rd')
  assert(await p(peer.set.add)(aliceID, 'follows', '3rd'), 'add 3rd')
  assert.equal(peer.set.has(aliceID, 'follows', '3rd'), true, 'has 3rd')
  add3 = lastMsgID()
  assert.deepEqual(
    peer.set.getItemRoots(aliceID, 'follows'),
    { '3rd': [add3], '2nd': [add2] },
    'itemRoots'
  )

  // Del 2nd
  t.true(peer.set.has(who, 'follows', '2nd'), 'has 2nd')
  t.ok(await p(peer.set.del)(who, 'follows', '2nd'), 'del 2nd') // msg seq 4
  t.false(peer.set.has(who, 'follows', '2nd'), 'doesnt have 2nd')
  del2 = lastMsgHash()
  t.deepEquals(
    peer.set.getItemRoots(who, 'follows'),
  assert.equal(peer.set.has(aliceID, 'follows', '2nd'), true, 'has 2nd')
  assert(await p(peer.set.del)(aliceID, 'follows', '2nd'), 'del 2nd') // msg seq 4
  assert.equal(peer.set.has(aliceID, 'follows', '2nd'), false, 'doesnt have 2nd')
  del2 = lastMsgID()
  assert.deepEqual(
    peer.set.getItemRoots(aliceID, 'follows'),
    { '3rd': [add3], '2nd': [del2] },
    'itemRoots'
  )

  // Del 2nd (idempotent)
  t.notOk(await p(peer.set.del)(who, 'follows', '2nd'), 'del 2nd idempotent')
  t.false(peer.set.has(who, 'follows', '2nd'), 'doesnt have 2nd')
  t.deepEquals(
    peer.set.getItemRoots(who, 'follows'),
  assert.equal(await p(peer.set.del)(aliceID, 'follows', '2nd'), false, 'del 2nd idempotent')
  assert.equal(peer.set.has(aliceID, 'follows', '2nd'), false, 'doesnt have 2nd')
  assert.deepEqual(
    peer.set.getItemRoots(aliceID, 'follows'),
    { '3rd': [add3], '2nd': [del2] },
    'itemRoots'
  )

@@ -105,44 +103,44 @@ test('Set add(), del(), has()', async (t) => {

let add4, add5
test('Set values()', async (t) => {
  t.ok(await p(peer.set.add)(who, 'follows', '4th'), 'add 4th')
  add4 = lastMsgHash()
  t.ok(await p(peer.set.add)(who, 'follows', '5th'), 'add 5th')
  add5 = lastMsgHash()
  assert(await p(peer.set.add)(aliceID, 'follows', '4th'), 'add 4th')
  add4 = lastMsgID()
  assert(await p(peer.set.add)(aliceID, 'follows', '5th'), 'add 5th')
  add5 = lastMsgID()

  const expected = new Set(['3rd', '4th', '5th'])
  for (const item of peer.set.values(who, 'follows')) {
    t.true(expected.has(item), 'values() item')
  for (const item of peer.set.values(aliceID, 'follows')) {
    assert.equal(expected.has(item), true, 'values() item')
    expected.delete(item)
  }
  t.equals(expected.size, 0, 'all items')
  assert.equal(expected.size, 0, 'all items')
})

test('predsl Set squeeze', async (t) => {
  t.deepEquals(
    peer.set.getItemRoots(who, 'follows'),
  assert.deepEqual(
    peer.set.getItemRoots(aliceID, 'follows'),
    { '3rd': [add3], '4th': [add4], '5th': [add5] },
    'itemRoots before squeeze'
  )

  t.equals(peer.set._squeezePotential('follows'), 3, 'squeezePotential=3')
  assert.equal(peer.set._squeezePotential('follows'), 3, 'squeezePotential=3')

  t.true(await p(peer.set.squeeze)(who, 'follows'), 'squeezed')
  const squeezed = lastMsgHash()
  assert.equal(await p(peer.set.squeeze)(aliceID, 'follows'), true, 'squeezed')
  const squeezed = lastMsgID()

  t.equals(peer.set._squeezePotential('follows'), 0, 'squeezePotential=0')
  assert.equal(peer.set._squeezePotential('follows'), 0, 'squeezePotential=0')

  t.deepEquals(
    peer.set.getItemRoots(who, 'follows'),
  assert.deepEqual(
    peer.set.getItemRoots(aliceID, 'follows'),
    { '3rd': [squeezed], '4th': [squeezed], '5th': [squeezed] },
    'itemRoots after squeeze'
  )

  t.false(await p(peer.set.squeeze)(who, 'follows'), 'squeeze again idempotent')
  const squeezed2 = lastMsgHash()
  t.equals(squeezed, squeezed2, 'squeezed msg hash is same')
  assert.equal(await p(peer.set.squeeze)(aliceID, 'follows'), false, 'squeeze again idempotent')
  const squeezed2 = lastMsgID()
  assert.equal(squeezed, squeezed2, 'squeezed msgID is same')
})

test('teardown', (t) => {
  peer.close(t.end)
test('teardown', async (t) => {
  await p(peer.close)(true)
})
test/util.js (45 lines changed)

@@ -1,14 +1,37 @@
const ssbKeys = require('ssb-keys')
const SSBURI = require('ssb-uri2')
const base58 = require('bs58')
const os = require('node:os')
const path = require('node:path')
const rimraf = require('rimraf')
const caps = require('ppppp-caps')
const Keypair = require('ppppp-keypair')

function generateKeypair(seed) {
  const keys = ssbKeys.generate('ed25519', seed, 'buttwoo-v1')
  const { data } = SSBURI.decompose(keys.id)
  keys.id = `ppppp:feed/v1/${base58.encode(Buffer.from(data, 'base64'))}`
  return keys
function createPeer(opts) {
  if (opts.name) {
    opts.path ??= path.join(os.tmpdir(), 'ppppp-set-' + opts.name)
    opts.keypair ??= Keypair.generate('ed25519', opts.name)
    opts.name = undefined
  }
  if (!opts.path) throw new Error('need opts.path in createPeer()')
  if (!opts.keypair) throw new Error('need opts.keypair in createPeer()')

  rimraf.sync(opts.path)
  return require('secret-stack/bare')()
    .use(require('secret-stack/plugins/net'))
    .use(require('secret-handshake-ext/secret-stack'))
    .use(require('ppppp-db'))
    .use(require('ssb-box'))
    .use(require('../lib'))
    .call(null, {
      caps,
      connections: {
        incoming: {
          net: [{ scope: 'device', transform: 'shse', port: null }],
        },
        outgoing: {
          net: [{ transform: 'shse' }],
        },
      },
      ...opts,
    })
}

module.exports = {
  generateKeypair,
}
module.exports = { createPeer }
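A brief usage note on the helper above (a sketch, not taken from the diff): tests can either pass an explicit keypair and path, as test/index.js does, or just a name and let createPeer derive both:

```js
// Both calls are sketches of how createPeer() above can be invoked.
const { createPeer } = require('./util')
const Keypair = require('ppppp-keypair')

const alice = createPeer({
  keypair: Keypair.generate('ed25519', 'alice'),
  path: '/tmp/ppppp-set-example', // hypothetical path
})
const bob = createPeer({ name: 'bob' }) // path and keypair derived from the name
```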
@@ -0,0 +1,16 @@
{
  "include": ["lib/**/*.js"],
  "exclude": ["coverage/", "node_modules/", "test/"],
  "compilerOptions": {
    "checkJs": true,
    "declaration": true,
    "emitDeclarationOnly": true,
    "exactOptionalPropertyTypes": true,
    "forceConsistentCasingInFileNames": true,
    "lib": ["es2022", "dom"],
    "module": "node16",
    "skipLibCheck": true,
    "strict": true,
    "target": "es2021"
  }
}