msg-v2 with group tangles

This commit is contained in:
Andre Staltz 2023-05-25 15:52:11 +03:00
parent 975769134c
commit 8d0d3cf175
No known key found for this signature in database
GPG Key ID: 9EDE23EA7E8A4890
25 changed files with 2002 additions and 242 deletions

View File

@ -1,4 +1,4 @@
const FeedV1 = require('./feed-v1')
const MsgV2 = require('./msg-v2')
/**
* @typedef {import('./index').Rec} Rec
@ -17,20 +17,20 @@ function ciphertextStrToBuffer(str) {
*/
function decrypt(rec, peer, config) {
const msgEncrypted = rec.msg
const { content } = msgEncrypted
if (typeof content !== 'string') return rec
const { data } = msgEncrypted
if (typeof data !== 'string') return rec
const encryptionFormat = peer.db.findEncryptionFormatFor(content)
const encryptionFormat = peer.db.findEncryptionFormatFor(data)
if (!encryptionFormat) return rec
// Decrypt
const ciphertextBuf = ciphertextStrToBuffer(content)
const ciphertextBuf = ciphertextStrToBuffer(data)
const opts = { keys: config.keys }
const plaintextBuf = encryptionFormat.decrypt(ciphertextBuf, opts)
if (!plaintextBuf) return rec
// Reconstruct KVT in JS encoding
const msgDecrypted = FeedV1.fromPlaintextBuffer(plaintextBuf, msgEncrypted)
const msgDecrypted = MsgV2.fromPlaintextBuffer(plaintextBuf, msgEncrypted)
return {
hash: rec.hash,
@ -39,7 +39,7 @@ function decrypt(rec, peer, config) {
misc: {
...rec.misc,
private: true,
originalContent: content,
originalData: data,
encryptionFormat: encryptionFormat.name,
},
}
@ -48,7 +48,7 @@ function decrypt(rec, peer, config) {
function reEncrypt(rec) {
return {
hash: rec.hash,
msg: { ...rec.msg, content: rec.misc.originalContent },
msg: { ...rec.msg, data: rec.misc.originalData },
received: rec.received,
...(rec.misc.size
? {

View File

@ -3,12 +3,12 @@ const push = require('push-stream')
const AAOL = require('async-append-only-log')
const promisify = require('promisify-4loc')
const Obz = require('obz')
const FeedV1 = require('./feed-v1')
const MsgV2 = require('./msg-v2')
const { ReadyGate } = require('./utils')
const { decrypt } = require('./encryption')
/**
* @typedef {import('./feed-v1').Msg} Msg
* @typedef {import('./msg-v2').Msg} Msg
*/
/**
@ -32,7 +32,7 @@ const { decrypt } = require('./encryption')
* @property {number} misc.size
* @property {number} misc.seq
* @property {boolean=} misc.private
* @property {Object=} misc.originalContent
* @property {Object=} misc.originalData
* @property {string=} misc.encryptionFormat
*/
@ -40,7 +40,7 @@ const { decrypt } = require('./encryption')
* @typedef {RecPresent | RecDeleted} Rec
*/
class DBTangle extends FeedV1.Tangle {
class DBTangle extends MsgV2.Tangle {
/**
* @param {string} rootHash
* @param {Iterable<Rec>} recordsIter
@ -184,21 +184,34 @@ exports.init = function initDB(peer, config) {
}
function add(msg, tangleRootHash, cb) {
// TODO: optimize this. This may be slow if you're adding many msgs in a
// row, because it creates a new Map() each time. Perhaps with QuickLRU
const tangle = new DBTangle(tangleRootHash, records())
const msgHash = FeedV1.getMsgHash(msg)
const msgHash = MsgV2.getMsgHash(msg)
// TODO: optimize this. Perhaps have a Map() of msgHash -> record
// Or even better, a bloom filter. If you just want to answer no/perhaps.
let rec
if ((rec = getRecord(msgHash))) return cb(null, rec)
// TODO: optimize this. This may be slow if you're adding many msgs in a
// row, because it creates a new Map() each time. Perhaps with QuickLRU
const tangle = new DBTangle(tangleRootHash, records())
const pubkeys = new Set()
if (msg.metadata.group) {
const groupTangle = new DBTangle(msg.metadata.group, records())
if (!groupTangle.has(msg.metadata.group)) {
// prettier-ignore
return cb(new Error('add() failed because the group tangle is unknown'))
}
for (const msgHash of groupTangle.topoSort()) {
const msg = get(msgHash)
if (!msg?.data?.add) continue
pubkeys.add(msg.data.add)
}
}
let err
if ((err = FeedV1.validate(msg, tangle, msgHash, tangleRootHash))) {
// prettier-ignore
return cb(new Error('add() failed validation for feed format v1', {cause: err}))
if ((err = MsgV2.validate(msg, tangle, pubkeys, msgHash, tangleRootHash))) {
return cb(new Error('add() failed msg validation', { cause: err }))
}
logAppend(msgHash, msg, (err, rec) => {
@ -209,87 +222,142 @@ exports.init = function initDB(peer, config) {
}
function initializeFeed(opts, cb) {
if (!opts.type) return cb(new Error('initializeFeed() requires a `type`'))
const keys = opts.keys ?? config.keys
const type = opts.type
const { group, type } = opts
const feedRootHash = getFeedRoot(FeedV1.stripAuthor(keys.id), type)
const feedRootHash = getFeedRoot(group, type)
if (feedRootHash) return cb(null, feedRootHash)
const feedRoot = FeedV1.createRoot(keys, type)
add(feedRoot, FeedV1.getMsgHash(feedRoot), (err, rec) => {
const feedRoot = MsgV2.createRoot(group, type, keys)
add(feedRoot, MsgV2.getMsgHash(feedRoot), (err, rec) => {
// prettier-ignore
if (err) return cb(new Error('initializeFeed() failed to add root', { cause: err }));
cb(null, rec.hash)
})
}
function create(opts, cb) {
function createGroup(opts, cb) {
const keys = opts?.keys ?? config.keys
let msg
try {
msg = MsgV2.createGroup(keys)
} catch (err) {
return cb(new Error('group.create() failed', { cause: err }))
}
const msgHash = MsgV2.getMsgHash(msg)
logAppend(msgHash, msg, (err, rec) => {
// prettier-ignore
if (err) return cb(new Error('group.create() failed in the log', { cause: err }))
onRecordAdded.set(rec)
cb(null, rec)
})
}
function addToGroup(opts, cb) {
if (!opts?.keys) return cb(new Error('addToGroup() requires a `keys`'))
if (!opts?.group) return cb(new Error('addToGroup() requires a `group`'))
const keys = opts.keys
// Fill-in tangle opts:
const tangleTemplates = opts.tangles ?? []
tangleTemplates.push(opts.group)
const tangles = populateTangles(tangleTemplates)
const fullOpts = {
...opts,
tangles,
keys,
data: { add: keys.id },
type: 'group',
}
// Create the actual message:
let msg
try {
msg = MsgV2.create(fullOpts)
} catch (err) {
return cb(new Error('group.add() failed', { cause: err }))
}
const msgHash = MsgV2.getMsgHash(msg)
logAppend(msgHash, msg, (err, rec) => {
// prettier-ignore
if (err) return cb(new Error('group.add() failed to append the log', { cause: err }))
onRecordAdded.set(rec)
cb(null, rec)
})
}
function publishToFeed(opts, cb) {
const keys = opts.keys ?? config.keys
const encryptionFormat = encryptionFormats.get(opts.encryptionFormat)
// prettier-ignore
if (opts.content.recps) {
if (opts.data.recps) {
if (!encryptionFormat) {
return cb(new Error(`create() does not support encryption format "${opts.encryptionFormat}"`))
// prettier-ignore
return cb(new Error(`feed.publish() does not support encryption format "${opts.encryptionFormat}"`))
}
}
if (!opts.content) return cb(new Error('create() requires a `content`'))
if (!opts.type) return cb(new Error('create() requires a `type`'))
if (!opts.data) return cb(new Error('feed.publish() requires a `data`'))
if (!opts.type) return cb(new Error('feed.publish() requires a `type`'))
if (!opts.group) return cb(new Error('feed.publish() requires a `group`'))
initializeFeed(opts, (err, feedRootHash) => {
// prettier-ignore
if (err) return cb(new Error('create() failed to initialize feed', { cause: err }));
if (err) return cb(new Error('feed.publish() failed to initialize feed', { cause: err }));
// Fill-in tangle opts:
const tangleTemplates = opts.tangles ?? []
tangleTemplates.push(feedRootHash)
const tangles = populateTangles(tangleTemplates)
const fullOpts = { ...opts, tangles, keys }
const groupTangle = new DBTangle(opts.group, records())
const groupTips = [...groupTangle.getTips()]
const fullOpts = { ...opts, tangles, groupTips, keys }
// If opts ask for encryption, encrypt and put ciphertext in opts.content
const recps = fullOpts.content.recps
// If opts ask for encryption, encrypt and put ciphertext in opts.data
const recps = fullOpts.data.recps
if (Array.isArray(recps) && recps.length > 0) {
const plaintext = FeedV1.toPlaintextBuffer(fullOpts)
const plaintext = MsgV2.toPlaintextBuffer(fullOpts)
const encryptOpts = { ...fullOpts, recps }
let ciphertextBuf
try {
ciphertextBuf = encryptionFormat.encrypt(plaintext, encryptOpts)
} catch (err) {
// prettier-ignore
return cb(new Error('create() failed to encrypt content', {cause: err}));
return cb(new Error('feed.publish() failed to encrypt data', {cause: err}));
}
if (!ciphertextBuf) {
// prettier-ignore
return cb(new Error('create() failed to encrypt with ' + encryptionFormat.name))
return cb(new Error('feed.publish() failed to encrypt with ' + encryptionFormat.name))
}
const ciphertextBase64 = ciphertextBuf.toString('base64')
fullOpts.content = ciphertextBase64 + '.' + encryptionFormat.name
fullOpts.data = ciphertextBase64 + '.' + encryptionFormat.name
}
// Create the actual message:
let msg
try {
msg = FeedV1.create(fullOpts)
msg = MsgV2.create(fullOpts)
} catch (err) {
return cb(new Error('create() failed', { cause: err }))
return cb(new Error('feed.publish() failed', { cause: err }))
}
const msgHash = FeedV1.getMsgHash(msg)
const msgHash = MsgV2.getMsgHash(msg)
// Encode the native message and append it to the log:
logAppend(msgHash, msg, (err, rec) => {
// prettier-ignore
if (err) return cb(new Error('create() failed to append the log', { cause: err }))
if (err) return cb(new Error('feed.publish() failed to append the log', { cause: err }))
onRecordAdded.set(rec)
cb(null, rec)
})
})
}
function getFeedRoot(authorId, findType) {
const findWho = FeedV1.stripAuthor(authorId)
function getFeedRoot(groupId, findType) {
const findGroup = MsgV2.stripGroup(groupId)
for (const rec of records()) {
if (FeedV1.isFeedRoot(rec.msg, findWho, findType)) return rec.hash
if (MsgV2.isFeedRoot(rec.msg, findGroup, findType)) return rec.hash
}
return null
}
@ -326,8 +394,8 @@ exports.init = function initDB(peer, config) {
const rec = getRecord(msgId)
if (!rec) return cb()
if (!rec.msg) return cb()
if (!rec.msg.content) return cb()
recs[rec.misc.seq].msg = FeedV1.erase(rec.msg)
if (!rec.msg.data) return cb()
recs[rec.misc.seq].msg = MsgV2.erase(rec.msg)
// FIXME: persist this change to disk!! Not supported by AAOL yet
cb()
}
@ -338,10 +406,10 @@ exports.init = function initDB(peer, config) {
function validateTangle(tangleId, msgs) {
let err
const tangle = new FeedV1.Tangle(tangleId)
const tangle = new MsgV2.Tangle(tangleId)
for (const msg of msgs) {
const msgHash = FeedV1.getMsgHash(msg)
if ((err = FeedV1.validate(msg, tangle, msgHash, tangleId))) return err
const msgHash = MsgV2.getMsgHash(msg)
if ((err = MsgV2.validate(msg, tangle, msgHash, tangleId))) return err
tangle.add(msgHash, msg)
}
}
@ -365,8 +433,14 @@ exports.init = function initDB(peer, config) {
installEncryptionFormat,
loaded,
add,
create,
getFeedRoot,
group: {
create: createGroup,
add: addToGroup,
},
feed: {
publish: publishToFeed,
getRoot: getFeedRoot,
},
getRecord,
get,
del,

53
lib/msg-v2/get-msg-id.js Normal file
View File

@ -0,0 +1,53 @@
const blake3 = require('blake3')
const base58 = require('bs58')
const stringify = require('json-canon')
/**
* @typedef {import('./index').Msg} Msg
*/
/**
 * Hash a msg down to its 16-byte identifier.
 *
 * Only `msg.metadata` is hashed (canonical JSON), so erasing `data`
 * does not change a msg's hash.
 *
 * @param {Msg} msg
 * @returns {Buffer} first 16 bytes of the blake3 hash
 */
function getMsgHashBuf(msg) {
  const serializedMetadata = stringify(msg.metadata)
  const fullHash = blake3.hash(Buffer.from(serializedMetadata, 'utf8'))
  return fullHash.subarray(0, 16)
}
/**
 * Normalize any msg reference to its bare base58 hash.
 *
 * Accepts a msg URI (`ppppp:message/v2/...`), an already-bare hash
 * string, or a full Msg object (which gets hashed).
 *
 * @param {Msg | string} x
 * @returns {string}
 */
function getMsgHash(x) {
  if (typeof x !== 'string') {
    // Full Msg object: hash its metadata and base58-encode the result.
    return base58.encode(getMsgHashBuf(x))
  }
  if (!x.startsWith('ppppp:message/v2/')) {
    // Assumed to already be a bare msg hash.
    return x
  }
  // URI form: the hash is the final path segment.
  const segments = x.split('/')
  return segments[segments.length - 1]
}
/**
 * Build the full `ppppp:message/v2/...` URI for a msg.
 *
 * The `type` path segment is omitted when the msg has no type
 * (e.g. a group root).
 *
 * @param {Msg} msg
 * @returns {string}
 */
function getMsgId(msg) {
  const { group, type } = msg.metadata
  const hash = getMsgHash(msg)
  const segments = type ? [group, type, hash] : [group, hash]
  return `ppppp:message/v2/${segments.join('/')}`
}
module.exports = { getMsgId, getMsgHash }

223
lib/msg-v2/index.js Normal file
View File

@ -0,0 +1,223 @@
const crypto = require('crypto')
const stringify = require('json-canon')
const ed25519 = require('ssb-keys/sodium')
const base58 = require('bs58')
const union = require('set.prototype.union')
const { stripGroup } = require('./strip')
const isFeedRoot = require('./is-feed-root')
const { getMsgId, getMsgHash } = require('./get-msg-id')
const representData = require('./represent-data')
const {
validateType,
validateData,
validate,
validateBatch,
validateMsgHash,
} = require('./validation')
const Tangle = require('./tangle')
/**
* @typedef {Iterator<Msg> & {values: () => Iterator<Msg>}} MsgIter
*/
/**
* @typedef {Object} TangleMetadata
* @property {number} depth
* @property {Array<string>} prev
*/
/**
* @typedef {Object} Msg
* @property {*} data
* @property {Object} metadata
* @property {string} metadata.dataHash
* @property {number} metadata.dataSize
* @property {string | null} metadata.group
* @property {Array<string> | null} metadata.groupTips
* @property {Record<string, TangleMetadata>} metadata.tangles
* @property {string} metadata.type
* @property {2} metadata.v
* @property {string} pubkey
* @property {string} sig
*/
/**
* @typedef {Object} Keys
* @property {string} keys.id
* @property {string} keys.private
*/
/**
* @typedef {Object} CreateOpts
* @property {*} data
* @property {string} type
* @property {Keys} keys
* @property {string | null} group
* @property {Array<string> | null} groupTips
* @property {Record<string, Tangle>} tangles
*/
/**
 * Compute the hash of the feed root msg for a given (group, type) pair.
 *
 * Feed roots are fully deterministic (no data, no signature content in
 * the hash — getMsgHash covers only `metadata`), so the root msg can be
 * reconstructed locally and hashed without ever seeing it.
 *
 * @param {string} groupId - group URI or bare group hash
 * @param {string} type
 * @returns {string} msg hash of the feed root
 */
function getFeedRootHash(groupId, type) {
  const rootMsg = {
    data: null,
    metadata: {
      dataHash: null,
      dataSize: 0,
      group: stripGroup(groupId),
      groupTips: null,
      tangles: {},
      type,
      v: 2,
    },
    pubkey: '',
    sig: '',
  }
  return getMsgHash(rootMsg)
}
/**
 * Serialize msg data for encryption.
 *
 * Canonical JSON is used so the same data always yields the same
 * plaintext bytes.
 *
 * @param {{data: any}} opts
 * @returns {Buffer}
 */
function toPlaintextBuffer(opts) {
  const json = stringify(opts.data)
  return Buffer.from(json, 'utf8')
}
/**
 * Create and sign a new msg-v2 message.
 *
 * Fixes over the previous version: `opts.groupTips` is copied before
 * sorting (the caller's array is no longer mutated), and the unreachable
 * `else` branch (dead because `!opts.tangles` already throws above) was
 * removed.
 *
 * @param {CreateOpts} opts
 * @returns {Msg} the signed msg
 * @throws when the type is invalid, `opts.tangles` is missing, a tangle
 *   root hash is malformed, or the data fails validation
 */
function create(opts) {
  let err
  if ((err = validateType(opts.type))) throw err
  if (!opts.tangles) throw new Error('opts.tangles is required')

  const [dataHash, dataSize] = representData(opts.data)
  const group = opts.group ? stripGroup(opts.group) : null
  // Copy before sorting so the caller's array is not mutated.
  const groupTips = opts.groupTips ? [...opts.groupTips].sort() : null

  // For each tangle this msg extends, record its depth and `prev` links
  // (current tips plus the lipmaa backlink set, deduplicated and sorted).
  const tangles = {}
  for (const rootId in opts.tangles) {
    if ((err = validateMsgHash(rootId))) throw err
    const tangle = opts.tangles[rootId]
    const depth = tangle.getMaxDepth() + 1
    const tips = tangle.getTips()
    const lipmaaSet = tangle.getLipmaaSet(depth)
    const prev = [...union(lipmaaSet, tips)].sort()
    tangles[rootId] = { depth, prev }
  }

  const msg = {
    data: opts.data,
    metadata: {
      dataHash,
      dataSize,
      group,
      groupTips,
      tangles,
      type: opts.type,
      v: 2,
    },
    pubkey: opts.keys.id,
    sig: '',
  }
  if ((err = validateData(msg))) throw err

  // Sign only the canonical JSON of `metadata` (data is covered via dataHash).
  const privateKey = Buffer.from(opts.keys.private, 'base64')
  // TODO: add a label prefix to the metadata before signing
  const metadataBuf = Buffer.from(stringify(msg.metadata), 'utf8')
  // TODO: when signing, what's the point of a customizable hmac?
  const sigBuf = ed25519.sign(privateKey, metadataBuf)
  msg.sig = base58.encode(sigBuf)
  return msg
}
/**
 * Create and sign the root msg of a feed identified by (group, type).
 *
 * A feed root carries no data and belongs to no tangle; it only pins
 * down the feed's identity and is signed by `keys`.
 *
 * @param {string} group
 * @param {string} type
 * @param {Keys} keys
 * @returns {Msg}
 * @throws when `type` is invalid
 */
function createRoot(group, type, keys) {
  let err
  if ((err = validateType(type))) throw err

  const metadata = {
    dataHash: null,
    dataSize: 0,
    group,
    groupTips: null,
    tangles: {},
    type,
    v: 2,
  }
  const msg = { data: null, metadata, pubkey: keys.id, sig: '' }

  const secretKey = Buffer.from(keys.private, 'base64')
  // TODO: add a label prefix to the metadata before signing
  const signable = Buffer.from(stringify(metadata), 'utf8')
  // TODO: when signing, what's the point of a customizable hmac?
  msg.sig = base58.encode(ed25519.sign(secretKey, signable))
  return msg
}
/**
 * Create the root msg of a new group: a tangle-less 'group' msg whose
 * data adds its own creator as the first member. The random nonce makes
 * each group root hash unique.
 *
 * @param {Keys} keys
 * @param {string} nonce - defaults to 32 random bytes, base58-encoded
 * @returns {Msg}
 */
function createGroup(keys, nonce = base58.encode(crypto.randomBytes(32))) {
  const opts = {
    data: { add: keys.id, nonce },
    group: null,
    groupTips: null,
    keys,
    tangles: {},
    type: 'group',
  }
  return create(opts)
}
/**
 * Return a copy of `msg` with its data dropped. Metadata (including
 * dataHash/dataSize) and sig are untouched, since only `metadata` is
 * hashed and signed.
 *
 * @param {Msg} msg
 * @returns {Msg}
 */
function erase(msg) {
  return Object.assign({}, msg, { data: null })
}
/**
 * Reconstruct a decrypted msg: parse the decrypted JSON and graft it
 * back onto the (otherwise unchanged) encrypted msg.
 *
 * @param {Buffer} plaintextBuf
 * @param {Msg} msg
 * @returns {Msg}
 */
function fromPlaintextBuffer(plaintextBuf, msg) {
  const data = JSON.parse(plaintextBuf.toString('utf-8'))
  return { ...msg, data }
}
module.exports = {
getMsgHash,
getMsgId,
isFeedRoot,
getFeedRootHash,
create,
createRoot,
createGroup,
erase,
stripGroup,
toPlaintextBuffer,
fromPlaintextBuffer,
Tangle,
validate,
validateBatch,
}

View File

@ -0,0 +1,22 @@
const { stripGroup } = require('./strip')
// True when `obj` has no enumerable keys. Note: for...in also walks
// inherited enumerable keys, so an object with only inherited
// properties is NOT considered empty (same as the original behavior).
function isEmptyObject(obj) {
  let empty = true
  for (const _key in obj) {
    empty = false
    break
  }
  return empty
}
// Check whether `msg` is a feed root: no data, empty tangles, a group,
// and (optionally) a specific group/type. The `0` sentinel for groupId
// and findType means "don't filter by group / by type".
function isFeedRoot(msg, groupId = 0, findType = 0) {
  const meta = msg.metadata
  if (meta.dataHash !== null) return false
  if (meta.dataSize !== 0) return false
  const groupMatches =
    groupId === 0 ? !!meta.group : meta.group === stripGroup(groupId)
  if (!groupMatches) return false
  if (meta.groupTips !== null) return false
  if (!isEmptyObject(meta.tangles)) return false
  return findType === 0 || meta.type === findType
}
module.exports = isFeedRoot

View File

@ -0,0 +1,16 @@
const blake3 = require('blake3')
const base58 = require('bs58')
const stringify = require('json-canon')
/**
 * Compute the (hash, size) pair that represents msg data in metadata.
 *
 * Canonical JSON serialization guarantees equal data always yields the
 * same hash and byte size. The hash is base58 of the first 16 bytes of
 * blake3.
 *
 * @param {any} data
 * @returns {[string, number]} [dataHash, dataSize]
 */
function representData(data) {
  const buf = Buffer.from(stringify(data), 'utf8')
  const hash = base58.encode(blake3.hash(buf).subarray(0, 16))
  return [hash, buf.length]
}
module.exports = representData

29
lib/msg-v2/strip.js Normal file
View File

@ -0,0 +1,29 @@
const { getMsgHash } = require('./get-msg-id')
// Normalize any msg reference — a { key } wrapper, a full msg object,
// a msg URI, or a bare hash — down to the bare msg hash.
function stripMsgKey(msgKey) {
  if (typeof msgKey === 'object') {
    // Unwrap { key } recursively; otherwise hash the msg object itself.
    return msgKey.key ? stripMsgKey(msgKey.key) : getMsgHash(msgKey)
  }
  // Non-URI strings are assumed to already be bare hashes.
  if (!msgKey.startsWith('ppppp:message/v2/')) return msgKey
  const segments = msgKey.split('/')
  return segments[segments.length - 1]
}
/**
 * Reduce a group URI (`ppppp:group/v2/<hash>...`) to its bare hash.
 * Non-URI inputs are assumed to already be bare group hashes and are
 * returned unchanged.
 *
 * @param {string} id
 * @returns {string}
 */
function stripGroup(id) {
  const prefix = 'ppppp:group/v2/'
  if (!id.startsWith(prefix)) return id
  // Keep only the hash segment right after the prefix.
  return id.slice(prefix.length).split('/')[0]
}
module.exports = {
stripMsgKey,
stripGroup,
}

265
lib/msg-v2/tangle.js Normal file
View File

@ -0,0 +1,265 @@
/**
* @typedef {import("./index").Msg} Msg
*/
// Lipmaa backlink target: given sequence number n, returns the earlier
// sequence number this entry should certificate-link to (as in the
// Bamboo append-only log). E.g. 1→0, 2→1, 4→1, 13→4, 40→13.
function lipmaa(n) {
  let threshold = 1
  let pow3 = 3
  let remainder = n
  // find k such that (3^k - 1)/2 >= n
  while (threshold < n) {
    pow3 *= 3
    threshold = (pow3 - 1) / 2
  }
  // find longest possible backjump
  pow3 /= 3
  if (threshold !== n) {
    while (remainder !== 0) {
      threshold = (pow3 - 1) / 2
      pow3 /= 3
      remainder %= threshold
    }
    if (threshold !== pow3) {
      pow3 = threshold
    }
  }
  return n - pow3
}
/**
 * Deterministic comparator for sorting msg hashes (used to order msgs
 * within a depth level, and to break ties in shortestPathToRoot).
 *
 * NOTE(review): localeCompare is locale-sensitive; confirm that plain
 * code-point ordering wasn't intended, since validation elsewhere
 * compares prev hashes with `<` (code-point order).
 *
 * @param {string} a
 * @param {string} b
 * @returns number
 */
function compareMsgHashes(a, b) {
  return a.localeCompare(b)
}
/**
 * In-memory index of one tangle: the DAG of msgs reachable from a single
 * root msg. Msgs are fed in via add(); the class tracks each msg's
 * depth, its `prev` links, the current tips, and per-depth ordering.
 */
class Tangle {
  /**
   * Hash of this tangle's root msg.
   * @type {string}
   */
  #rootHash
  /**
   * The root msg itself; stays undefined until add() receives it.
   * @type {Msg}
   */
  #rootMsg
  /**
   * Msg hashes that no added msg points to via `prev` (the frontier).
   * @type {Set<string>}
   */
  #tips = new Set()
  /**
   * msgHash -> that msg's `prev` hashes (links toward the root).
   * @type {Map<string, Array<string>>}
   */
  #prev = new Map()
  /**
   * msgHash -> the depth the msg declares for this tangle.
   * @type {Map<string, number>}
   */
  #depth = new Map()
  /**
   * depth -> msg hashes at that depth, kept sorted by compareMsgHashes.
   * @type {Map<number, Array<string>>}
   */
  #perDepth = new Map()
  /**
   * Largest depth seen so far.
   * @type {number}
   */
  #maxDepth
  /**
   * @param {string} rootHash - hash of the tangle's root msg; msgs are
   *   added afterwards one by one via add()
   */
  constructor(rootHash) {
    this.#rootHash = rootHash
    this.#maxDepth = 0
  }
  /**
   * Index a msg in this tangle. The root is stored specially at depth 0;
   * any other msg is indexed only if it declares this tangle in its
   * metadata.tangles. Repeated or unrelated msgs are silently ignored.
   */
  add(msgHash, msg) {
    if (msgHash === this.#rootHash && !this.#rootMsg) {
      this.#tips.add(msgHash)
      this.#perDepth.set(0, [msgHash])
      this.#depth.set(msgHash, 0)
      this.#rootMsg = msg
      return
    }
    const tangles = msg.metadata.tangles
    if (msgHash !== this.#rootHash && tangles[this.#rootHash]) {
      if (this.#depth.has(msgHash)) return
      this.#tips.add(msgHash)
      const prev = tangles[this.#rootHash].prev
      // Anything this msg points to can no longer be a tip.
      for (const p of prev) {
        this.#tips.delete(p)
      }
      this.#prev.set(msgHash, prev)
      const depth = tangles[this.#rootHash].depth
      if (depth > this.#maxDepth) this.#maxDepth = depth
      this.#depth.set(msgHash, depth)
      const atDepth = this.#perDepth.get(depth) ?? []
      atDepth.push(msgHash)
      atDepth.sort(compareMsgHashes)
      this.#perDepth.set(depth, atDepth)
      return
    }
  }
  /**
   * @param {number} depth
   * @returns {Array<string>} msg hashes at that depth (sorted), or []
   */
  #getAllAtDepth(depth) {
    return this.#perDepth.get(depth) ?? []
  }
  /**
   * Topological order: depth 0 (root) upward, hashes sorted within each
   * depth. Returns [] (with a trace) if the root was never added.
   * @returns {Array<string>}
   */
  topoSort() {
    if (!this.#rootMsg) {
      console.trace('Tangle is missing root message')
      return []
    }
    const sorted = []
    const max = this.#maxDepth
    for (let i = 0; i <= max; i++) {
      const atDepth = this.#getAllAtDepth(i)
      for (const msgHash of atDepth) {
        sorted.push(msgHash)
      }
    }
    return sorted
  }
  /**
   * Current frontier of the tangle. Returns the live internal Set —
   * callers must not mutate it.
   * @returns {Set<string>}
   */
  getTips() {
    if (!this.#rootMsg) {
      console.trace('Tangle is missing root message')
      return new Set()
    }
    return this.#tips
  }
  /**
   * Msg hashes at the lipmaa backlink depth for a msg that would sit at
   * `depth` (used as extra `prev` candidates when creating a msg).
   * @param {number} depth
   * @returns {Set<string>}
   */
  getLipmaaSet(depth) {
    if (!this.#rootMsg) {
      console.trace('Tangle is missing root message')
      return new Set()
    }
    const lipmaaDepth = lipmaa(depth + 1) - 1
    return new Set(this.#getAllAtDepth(lipmaaDepth))
  }
  /**
   * @param {string} msgHash
   * @returns {boolean} whether the msg has been added to this tangle
   */
  has(msgHash) {
    return this.#depth.has(msgHash)
  }
  /**
   * @param {string} msgHash
   * @returns {number} the msg's depth, or -1 if unknown
   */
  getDepth(msgHash) {
    return this.#depth.get(msgHash) ?? -1
  }
  // A tangle is a feed iff its root looks like a feed root: no data, no
  // dataHash/dataSize, and no groupTips.
  isFeed() {
    if (!this.#rootMsg) {
      console.trace('Tangle is missing root message')
      return false
    }
    if (this.#rootMsg.data) return false
    const metadata = this.#rootMsg.metadata
    if (metadata.dataSize > 0) return false
    if (metadata.dataHash !== null) return false
    if (metadata.groupTips !== null) return false
    return true
  }
  // The feed's identity { group, type } from its root, or null if this
  // tangle is not a feed.
  getFeed() {
    if (!this.isFeed()) return null
    const { group, type } = this.#rootMsg.metadata
    return { group, type }
  }
  /**
   * Walk from `msgHash` toward the root, at each step greedily picking
   * the prev with the smallest depth (ties broken by compareMsgHashes).
   * The returned path excludes `msgHash` itself and ends at the root.
   *
   * NOTE(review): if a msg's prevs all had depth >= its own (malformed
   * input), `min` would stay at `current` and this loop would not
   * terminate — presumably validation rules that out; confirm.
   */
  shortestPathToRoot(msgHash) {
    if (!this.#rootMsg) {
      console.trace('Tangle is missing root message')
      return []
    }
    const path = []
    let current = msgHash
    while (true) {
      const prev = this.#prev.get(current)
      if (!prev) break
      let minDepth = this.#depth.get(current)
      let min = current
      for (const p of prev) {
        const d = this.#depth.get(p)
        if (d < minDepth) {
          minDepth = d
          min = p
        } else if (d === minDepth && compareMsgHashes(p, min) < 0) {
          min = p
        }
      }
      path.push(min)
      current = min
    }
    return path
  }
  /**
   * True when msg `a` is an ancestor of msg `b` via `prev` links
   * (breadth-first search from b toward the root).
   */
  precedes(a, b) {
    if (!this.#rootMsg) {
      console.trace('Tangle is missing root message')
      return false
    }
    if (a === b) return false
    if (b === this.#rootHash) return false
    let toCheck = [b]
    while (toCheck.length > 0) {
      const prev = this.#prev.get(toCheck.shift())
      if (!prev) continue
      if (prev.includes(a)) return true
      toCheck.push(...prev)
    }
    return false
  }
  // Number of msgs added (including the root, once seen).
  size() {
    return this.#depth.size
  }
  // Largest depth among added msgs (0 until any non-root msg is added).
  getMaxDepth() {
    return this.#maxDepth
  }
  // Human-readable dump of the tangle, one line per depth level.
  debug() {
    let str = ''
    const max = this.#maxDepth
    for (let i = 0; i <= max; i++) {
      const atDepth = this.#getAllAtDepth(i)
      str += `Depth ${i}: ${atDepth.join(', ')}\n`
    }
    return str
  }
}
module.exports = Tangle

283
lib/msg-v2/validation.js Normal file
View File

@ -0,0 +1,283 @@
const base58 = require('bs58')
const ed25519 = require('ssb-keys/sodium')
const stringify = require('json-canon')
const Tangle = require('./tangle')
const representData = require('./represent-data')
const isFeedRoot = require('./is-feed-root')
/**
 * Check that `msg` has every structural field a msg-v2 requires: `data`,
 * a `metadata` object containing dataHash, dataSize, group, groupTips,
 * tangles and type, `metadata.v === 2`, and a string `sig`.
 *
 * Only presence/shape is checked here; field values are verified by the
 * other validators. (Note: `pubkey` is checked by validatePubkey, not
 * here.)
 *
 * @param {any} msg
 * @returns {string | undefined} error description, or undefined when ok
 */
function validateShape(msg) {
  if (!msg || typeof msg !== 'object') {
    return 'invalid message: not an object\n' + JSON.stringify(msg)
  }
  if (!('data' in msg)) {
    return 'invalid message: must have data\n' + JSON.stringify(msg)
  }
  if (!msg.metadata || typeof msg.metadata !== 'object') {
    return 'invalid message: must have metadata\n' + JSON.stringify(msg)
  }
  if (!('dataHash' in msg.metadata)) {
    // prettier-ignore
    return 'invalid message: must have metadata.dataHash\n' + JSON.stringify(msg)
  }
  if (!('dataSize' in msg.metadata)) {
    // prettier-ignore
    return 'invalid message: must have metadata.dataSize\n' + JSON.stringify(msg)
  }
  if (!('group' in msg.metadata)) {
    return 'invalid message: must have metadata.group\n' + JSON.stringify(msg)
  }
  if (!('groupTips' in msg.metadata)) {
    // prettier-ignore
    return 'invalid message: must have metadata.groupTips\n' + JSON.stringify(msg)
  }
  if (!('tangles' in msg.metadata)) {
    return 'invalid message: must have metadata.tangles\n' + JSON.stringify(msg)
  }
  if (!('type' in msg.metadata)) {
    return 'invalid message: must have metadata.type\n' + JSON.stringify(msg)
  }
  if (msg.metadata.v !== 2) {
    return 'invalid message: must have metadata.v 2\n' + JSON.stringify(msg)
  }
  if (typeof msg.sig !== 'string') {
    return 'invalid message: must have sig\n' + JSON.stringify(msg)
  }
}
/**
 * Check that msg.pubkey is a base58 string decoding to 32 bytes.
 * (`pubkeys` is accepted for signature parity with the other validators
 * but is not used here; group membership is validateGroupPubkey's job.)
 *
 * @param {any} msg
 * @param {Set<string>} pubkeys
 * @returns {string | undefined} error description, or undefined when ok
 */
function validatePubkey(msg, pubkeys) {
  const { pubkey } = msg
  if (typeof pubkey !== 'string') {
    // prettier-ignore
    return `invalid message: pubkey "${pubkey}" should have been a string\n` + JSON.stringify(msg)
  }
  let decoded
  try {
    decoded = base58.decode(pubkey)
  } catch (err) {
    // prettier-ignore
    return `invalid message: pubkey "${pubkey}" should have been a base58 string\n` + JSON.stringify(msg)
  }
  if (decoded.length !== 32) {
    // prettier-ignore
    return `invalid message: decoded "pubkey" should be 32 bytes but was ${decoded.length}\n` + JSON.stringify(msg)
  }
}
/**
 * Check group membership: a msg that names a group must be signed by one
 * of that group's known pubkeys.
 *
 * Feed roots are exempt — they name a group but aren't authored by a
 * member (their pubkey is empty).
 *
 * @param {any} msg
 * @param {Set<string>} pubkeys - pubkeys admitted to the msg's group
 * @returns {string | undefined} error description, or undefined when ok
 */
function validateGroupPubkey(msg, pubkeys) {
  if (isFeedRoot(msg)) return
  if (!msg.metadata.group) return
  if (pubkeys.has(msg.pubkey)) return
  // prettier-ignore
  return `invalid message: pubkey "${msg.pubkey}" should have been one of "${[...pubkeys]}" from the group "${msg.metadata.group}"\n` + JSON.stringify(msg)
}
/**
 * Check that `str` is a plausible msg hash: base58 text decoding to
 * exactly 16 bytes.
 *
 * @param {string} str
 * @returns {string | undefined} error description, or undefined when ok
 */
function validateMsgHash(str) {
  let hashBuf
  try {
    hashBuf = Buffer.from(base58.decode(str))
  } catch (err) {
    return `invalid message: msgHash "${str}" should have been a base58 string`
  }
  if (hashBuf.length !== 16) {
    // prettier-ignore
    return `invalid message: decoded hash should be 16 bytes but was ${hashBuf.length}`
  }
}
/**
 * Check that metadata.dataSize is a non-negative safe integer (the byte
 * length of the serialized data).
 *
 * @param {any} msg
 * @returns {string | undefined} error description, or undefined when ok
 */
function validateDataSize(msg) {
  const { dataSize } = msg.metadata
  const isValid = Number.isSafeInteger(dataSize) && dataSize >= 0
  if (isValid) return
  // prettier-ignore
  return `invalid message: dataSize ${dataSize} should have been an unsigned integer\n` + JSON.stringify(msg)
}
/**
 * Verify msg.sig: it must be a base58 string decoding to 64 bytes, and
 * a valid ed25519 signature by msg.pubkey over the canonical-JSON
 * serialization of msg.metadata.
 *
 * Note that only `metadata` is signed; `data` is covered indirectly via
 * metadata.dataHash.
 *
 * @param {any} msg
 * @returns {string | undefined} error description, or undefined when ok
 */
function validateSignature(msg) {
  const { sig } = msg
  if (typeof sig !== 'string') {
    return (
      `invalid message: sig "${sig}" should have been a string\n` +
      JSON.stringify(msg)
    )
  }
  let sigBuf
  try {
    sigBuf = Buffer.from(base58.decode(sig))
    if (sigBuf.length !== 64) {
      // prettier-ignore
      return `invalid message: sig should be 64 bytes but was ${sigBuf.length}\n` + JSON.stringify(msg)
    }
  } catch (err) {
    // prettier-ignore
    return `invalid message: sig "${sig}" should have been a base58 string\n` + JSON.stringify(msg)
  }
  const publicKeyBuf = Buffer.from(base58.decode(msg.pubkey))
  const signableBuf = Buffer.from(stringify(msg.metadata), 'utf8')
  const verified = ed25519.verify(publicKeyBuf, sigBuf, signableBuf)
  if (!verified) {
    return 'invalid message: sig is invalid\n' + JSON.stringify(msg)
  }
}
/**
 * Validate a non-root msg's position in the tangle identified by
 * `tangleId`: the msg must carry a tangle entry for it, with a positive
 * integer `depth` and a `prev` array that is sorted, duplicate-free, and
 * made of bare hashes. At least one prev must be locally known, and when
 * ALL prevs are known, depth must equal the largest prev depth plus one.
 * For feed tangles, the msg's (group, type) must match the feed root's.
 *
 * @param {any} msg
 * @param {Tangle} tangle - the (partially) loaded tangle to check against
 * @param {*} tangleId - root hash of the tangle
 * @returns {string | undefined} error description, or undefined when ok
 */
function validateTangle(msg, tangle, tangleId) {
  if (!msg.metadata.tangles[tangleId]) {
    // prettier-ignore
    return `invalid message: must have metadata.tangles.${tangleId}\n` + JSON.stringify(msg)
  }
  const { depth, prev } = msg.metadata.tangles[tangleId]
  if (!prev || !Array.isArray(prev)) {
    // prettier-ignore
    return `invalid message: prev "${prev}" should have been an array\n` + JSON.stringify(msg)
  }
  if (!Number.isSafeInteger(depth) || depth <= 0) {
    // prettier-ignore
    return `invalid message: depth "${depth}" should have been a positive integer\n` + JSON.stringify(msg)
  }
  // Feed tangle: every msg in it must share the root's group and type.
  if (tangle.isFeed()) {
    const { group, type } = tangle.getFeed()
    if (type !== msg.metadata.type) {
      // prettier-ignore
      return `invalid message: type "${msg.metadata.type}" should have been feed type "${type}"\n` + JSON.stringify(msg)
    }
    if (group !== msg.metadata.group) {
      // prettier-ignore
      return `invalid message: group "${msg.metadata.group}" should have been feed group "${group}"\n` + JSON.stringify(msg)
    }
  }
  let lastPrev = null
  let minDiff = Infinity
  let countPrevUnknown = 0
  for (const p of prev) {
    if (typeof p !== 'string') {
      // prettier-ignore
      return `invalid message: prev item "${p}" should have been a string\n` + JSON.stringify(msg)
    }
    if (p.startsWith('ppppp:')) {
      // prettier-ignore
      return `invalid message: prev item "${p}" is a URI, but should have been a hash\n` + JSON.stringify(msg)
    }
    // Since prev must be sorted, duplicates would be adjacent — checking
    // against the previous item suffices.
    // NOTE(review): sortedness is checked with `<` (code-point order)
    // while Tangle sorts via localeCompare — confirm the two orderings
    // agree for base58 hashes.
    if (lastPrev !== null) {
      if (p === lastPrev) {
        // prettier-ignore
        return `invalid message: prev "${prev}" contains duplicates\n` + JSON.stringify(msg)
      }
      if (p < lastPrev) {
        // prettier-ignore
        return `invalid message: prev "${prev}" should have been alphabetically sorted\n` + JSON.stringify(msg)
      }
    }
    lastPrev = p
    // Prevs we don't have locally can't be depth-checked.
    if (!tangle.has(p)) {
      countPrevUnknown += 1
      continue
    }
    const prevDepth = tangle.getDepth(p)
    const diff = depth - prevDepth
    if (diff <= 0) {
      // prettier-ignore
      return `invalid message: depth of prev "${p}" should have been lower than this message's depth\n` + JSON.stringify(msg)
    }
    if (diff < minDiff) minDiff = diff
  }
  if (countPrevUnknown === prev.length) {
    // prettier-ignore
    return 'invalid message: all prev are locally unknown\n' + JSON.stringify(msg)
  }
  // Only enforce the depth arithmetic when every prev is locally known.
  if (countPrevUnknown === 0 && minDiff !== 1) {
    // prettier-ignore
    return `invalid message: depth must be the largest prev depth plus one\n` + JSON.stringify(msg)
  }
}
/**
 * Validate a msg that claims to be the root of tangle `tangleId`: its
 * own hash must equal the tangle id, and it must not carry tangle
 * metadata pointing at itself.
 *
 * @param {any} msg
 * @param {string} msgHash - hash of `msg`
 * @param {string} tangleId
 * @returns {string | undefined} error description, or undefined when ok
 */
function validateTangleRoot(msg, msgHash, tangleId) {
  const hashMatches = msgHash === tangleId
  if (!hashMatches) {
    // prettier-ignore
    return `invalid message: tangle root hash "${msgHash}" must match tangleId "${tangleId}"\n` + JSON.stringify(msg)
  }
  const selfTangle = msg.metadata.tangles[tangleId]
  if (selfTangle) {
    // prettier-ignore
    return `invalid message: tangle root "${tangleId}" must not have self tangle data\n` + JSON.stringify(msg)
  }
}
/**
 * Check that `type` is an acceptable msg type: a string of 3 to 100
 * characters drawn from [a-zA-Z0-9_].
 *
 * @param {any} type
 * @returns {string | undefined} error description, or undefined when ok
 */
function validateType(type) {
  if (!type || typeof type !== 'string') {
    // prettier-ignore
    return `invalid type: "${type}" (${typeof type}) should have been a string`
  }
  const { length } = type
  if (length > 100) {
    // prettier-ignore
    return `invalid type: "${type}" is 100+ characters long`
  }
  if (length < 3) {
    // prettier-ignore
    return `invalid type: "${type}" is shorter than 3 characters`
  }
  // type is non-empty here, so a full-match test is equivalent to
  // searching for any disallowed character.
  if (!/^[a-zA-Z0-9_]+$/.test(type)) {
    // prettier-ignore
    return `invalid type: "${type}" contains characters other than a-z, A-Z, 0-9, or _`
  }
}
/**
 * Check that msg.data is consistent with its metadata: null data (an
 * erased msg) is always valid; otherwise data must be an object or
 * string whose recomputed hash and size match metadata.dataHash and
 * metadata.dataSize.
 *
 * @param {any} msg
 * @returns {string | undefined} error description, or undefined when ok
 */
function validateData(msg) {
  const { data } = msg
  if (data === null) return
  if (Array.isArray(data)) {
    // prettier-ignore
    return `invalid message: data "${data}" must not be an array\n` + JSON.stringify(msg)
  }
  const kind = typeof data
  if (kind !== 'object' && kind !== 'string') {
    // prettier-ignore
    return `invalid message: data "${data}" must be an object or a string` + JSON.stringify(msg)
  }
  const [dataHash, dataSize] = representData(data)
  if (dataHash !== msg.metadata.dataHash) {
    // prettier-ignore
    return `invalid message: data hash "${dataHash}" does not match metadata.dataHash "${msg.metadata.dataHash}"\n` + JSON.stringify(msg)
  }
  if (dataSize !== msg.metadata.dataSize) {
    // prettier-ignore
    return `invalid message: data size "${dataSize}" does not match metadata.dataSize "${msg.metadata.dataSize}"\n` + JSON.stringify(msg)
  }
}
/**
 * Run the full msg-v2 validation pipeline over one msg, returning the
 * first failure. Order: shape, pubkey encoding, dataSize, data-vs-
 * metadata consistency, type, group membership, tangle position
 * (root vs non-root), and finally the ed25519 signature.
 *
 * @param {any} msg
 * @param {Tangle} tangle - the tangle identified by `rootHash`
 * @param {Set<string>} pubkeys - pubkeys admitted to the msg's group
 * @param {string} msgHash - hash of `msg`
 * @param {string} rootHash - root hash (id) of `tangle`
 * @returns {string | undefined} error description, or undefined when valid
 */
function validate(msg, tangle, pubkeys, msgHash, rootHash) {
  let err
  if ((err = validateShape(msg))) return err
  if ((err = validatePubkey(msg, pubkeys))) return err
  if ((err = validateDataSize(msg))) return err
  if ((err = validateData(msg))) return err
  if ((err = validateType(msg.metadata.type))) return err
  if ((err = validateGroupPubkey(msg, pubkeys))) return err
  // An empty tangle means this msg must itself be the tangle's root.
  if (tangle.size() === 0) {
    if ((err = validateTangleRoot(msg, msgHash, rootHash))) return err
  } else {
    if ((err = validateTangle(msg, tangle, rootHash))) return err
  }
  if ((err = validateSignature(msg))) return err
}
module.exports = {
validateType,
validateData,
validate,
validateMsgHash,
}

View File

@ -1,3 +1,30 @@
# Feed V2
JSON
```typescript
interface Msg {
data: any | null, // any object, or null
metadata: {
dataHash: ContentHash, // blake3 hash of the `data` object serialized
dataSize: number, // byte size (unsigned integer) of the `data` object serialized
group: string | null, // blake3 hash of a group tangle root msg, or null
groupTips: Array<string> | null, // list of blake3 hashes of group tangle tips, or null
tangles: {
// for each tangle this msg belongs to, identified by the tangle's root
[rootMsgHash: string]: {
depth: number, // maximum distance (positive integer) from this msg to the root
prev: Array<MsgHash>, // list of msg hashes of existing msgs, unique set and ordered alphabetically
},
},
type: string, // alphanumeric string, at least 3 chars, max 100 chars
v: 2, // hard-coded at 2, indicates the version of the feed format
},
pubkey: Pubkey, // base58 encoded string for the author's public key
sig: Signature, // Signs the `metadata` object
}
```
# Feed V1
JSON

View File

@ -4,7 +4,7 @@ const os = require('os')
const rimraf = require('rimraf')
const SecretStack = require('secret-stack')
const caps = require('ssb-caps')
const FeedV1 = require('../lib/feed-v1')
const MsgV2 = require('../lib/msg-v2')
const p = require('util').promisify
const { generateKeypair } = require('./util')
@ -20,25 +20,32 @@ test('add()', async (t) => {
await peer.db.loaded()
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const groupMsg0 = MsgV2.createGroup(keys)
const group = MsgV2.getMsgHash(groupMsg0)
await p(peer.db.add)(groupMsg0, group)
const rootMsg = MsgV2.createRoot(group, 'post', keys)
const rootHash = MsgV2.getMsgHash(rootMsg)
const recRoot = await p(peer.db.add)(rootMsg, rootHash)
t.equals(recRoot.msg.metadata.size, 0, 'root msg added')
const tangle = new FeedV1.Tangle(rootHash)
t.equals(recRoot.msg.metadata.dataSize, 0, 'root msg added')
const tangle = new MsgV2.Tangle(rootHash)
tangle.add(recRoot.hash, recRoot.msg)
const inputMsg = FeedV1.create({
const inputMsg = MsgV2.create({
keys,
type: 'post',
content: { text: 'This is the first post!' },
data: { text: 'This is the first post!' },
group,
groupTips: [group],
tangles: {
[rootHash]: tangle,
},
})
const rec = await p(peer.db.add)(inputMsg, rootHash)
t.equal(rec.msg.content.text, 'This is the first post!')
t.equal(rec.msg.data.text, 'This is the first post!')
await p(peer.close)(true)
})

View File

@ -1,133 +0,0 @@
const test = require('tape')
const path = require('path')
const os = require('os')
const rimraf = require('rimraf')
const SecretStack = require('secret-stack')
const caps = require('ssb-caps')
const p = require('util').promisify
const FeedV1 = require('../lib/feed-v1')
const { generateKeypair } = require('./util')
const DIR = path.join(os.tmpdir(), 'ppppp-db-create')
rimraf.sync(DIR)
const keys = generateKeypair('alice')
const bobKeys = generateKeypair('bob')
let peer
test('setup', async (t) => {
peer = SecretStack({ appKey: caps.shs })
.use(require('../lib'))
.use(require('ssb-box'))
.call(null, { keys, path: DIR })
await peer.db.loaded()
})
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
let msgHash1
let rec1
let msgHash2
test('create()', async (t) => {
rec1 = await p(peer.db.create)({
type: 'post',
content: { text: 'I am 1st post' },
})
t.equal(rec1.msg.content.text, 'I am 1st post', 'msg1 text correct')
t.equal(
rec1.msg.metadata.tangles[rootHash].depth,
1,
'msg1 tangle depth correct'
)
t.deepEquals(
rec1.msg.metadata.tangles[rootHash].prev,
[rootHash],
'msg1 tangle prev correct'
)
msgHash1 = FeedV1.getMsgHash(rec1.msg)
const rec2 = await p(peer.db.create)({
type: 'post',
content: { text: 'I am 2nd post' },
})
t.equal(rec2.msg.content.text, 'I am 2nd post', 'msg2 text correct')
t.equal(
rec2.msg.metadata.tangles[rootHash].depth,
2,
'msg2 tangle depth correct'
)
t.deepEquals(
rec2.msg.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2 tangle prev correct'
)
msgHash2 = FeedV1.getMsgHash(rec2.msg)
})
test('add() forked then create() merged', async (t) => {
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
tangle.add(rec1.hash, rec1.msg)
const msg3 = FeedV1.create({
keys,
type: 'post',
content: { text: '3rd post forked from 1st' },
tangles: {
[rootHash]: tangle,
},
})
const rec3 = await p(peer.db.add)(msg3, rootHash)
const msgHash3 = FeedV1.getMsgHash(rec3.msg)
const rec4 = await p(peer.db.create)({
type: 'post',
content: { text: 'I am 4th post' },
})
t.ok(rec4, '4th post created')
t.deepEquals(
rec4.msg.metadata.tangles[rootHash].prev,
[rootHash, msgHash2, msgHash3],
'msg4 prev is root, msg2 and msg3'
)
})
test('create() encrypted with box', async (t) => {
const recEncrypted = await p(peer.db.create)({
type: 'post',
content: { text: 'I am chewing food', recps: [peer.id] },
encryptionFormat: 'box',
})
t.equal(typeof recEncrypted.msg.content, 'string')
t.true(recEncrypted.msg.content.endsWith('.box'), '.box')
const msgDecrypted = peer.db.get(recEncrypted.hash)
t.equals(msgDecrypted.content.text, 'I am chewing food')
})
test('create() with tangles', async (t) => {
const recA = await p(peer.db.create)({
type: 'comment',
content: { text: 'I am root' },
})
t.equal(recA.msg.content.text, 'I am root', 'root text correct')
const recB = await p(peer.db.create)({
type: 'comment',
content: { text: 'I am comment 1' },
tangles: [recA.hash],
keys: bobKeys,
})
t.equal(recB.msg.metadata.tangles[recA.hash].depth, 1, 'tangle depth 1')
t.deepEquals(
recB.msg.metadata.tangles[recA.hash].prev,
[recA.hash],
'tangle prev'
)
})
test('teardown', (t) => {
peer.close(t.end)
})

View File

@ -20,18 +20,21 @@ test('del', async (t) => {
await peer.db.loaded()
const group = (await p(peer.db.group.create)(null)).hash
const msgHashes = []
for (let i = 0; i < 5; i++) {
const rec = await p(peer.db.create)({
const rec = await p(peer.db.feed.publish)({
group,
type: 'post',
content: { text: 'm' + i },
data: { text: 'm' + i },
})
msgHashes.push(rec.hash)
}
const before = []
for (const msg of peer.db.msgs()) {
if (msg.content) before.push(msg.content.text)
if (msg.data && msg.metadata.group) before.push(msg.data.text)
}
t.deepEqual(before, ['m0', 'm1', 'm2', 'm3', 'm4'], 'msgs before the delete')
@ -40,7 +43,7 @@ test('del', async (t) => {
const after = []
for (const msg of peer.db.msgs()) {
if (msg.content) after.push(msg.content.text)
if (msg.data && msg.metadata.group) after.push(msg.data.text)
}
t.deepEqual(after, ['m0', 'm1', 'm3', 'm4'], 'msgs after the delete')
@ -78,7 +81,9 @@ test('del', async (t) => {
})
t.deepEqual(
persistedMsgs.filter((msg) => msg.content).map((msg) => msg.content.text),
persistedMsgs
.filter((msg) => msg.data && msg.metadata.group)
.map((msg) => msg.data.text),
['m0', 'm1', 'm3', 'm4'],
'msgs in disk after the delete'
)

View File

@ -20,18 +20,21 @@ test('erase', async (t) => {
await peer.db.loaded()
const group = (await p(peer.db.group.create)(null)).hash
const msgHashes = []
for (let i = 0; i < 5; i++) {
const rec = await p(peer.db.create)({
const rec = await p(peer.db.feed.publish)({
group,
type: 'post',
content: { text: 'm' + i },
data: { text: 'm' + i },
})
msgHashes.push(rec.hash)
}
const before = []
for (const msg of peer.db.msgs()) {
if (msg.content) before.push(msg.content.text)
if (msg.data && msg.metadata.group) before.push(msg.data.text)
}
t.deepEqual(before, ['m0', 'm1', 'm2', 'm3', 'm4'], '5 msgs before the erase')
@ -40,7 +43,7 @@ test('erase', async (t) => {
const after = []
for (const msg of peer.db.msgs()) {
if (msg.content) after.push(msg.content.text)
if (msg.data && msg.metadata.group) after.push(msg.data.text)
}
t.deepEqual(after, ['m0', 'm1', 'm3', 'm4'], '4 msgs after the erase')

158
test/feed-publish.test.js Normal file
View File

@ -0,0 +1,158 @@
const test = require('tape')
const path = require('path')
const os = require('os')
const rimraf = require('rimraf')
const SecretStack = require('secret-stack')
const caps = require('ssb-caps')
const p = require('util').promisify
const MsgV2 = require('../lib/msg-v2')
const { generateKeypair } = require('./util')
const DIR = path.join(os.tmpdir(), 'ppppp-db-feed-publish')
rimraf.sync(DIR)
const keys = generateKeypair('alice')
const bobKeys = generateKeypair('bob')
let peer
let group
let rootMsg
let rootHash
test('setup', async (t) => {
peer = SecretStack({ appKey: caps.shs })
.use(require('../lib'))
.use(require('ssb-box'))
.call(null, { keys, path: DIR })
await peer.db.loaded()
group = (await p(peer.db.group.create)(null)).hash
rootMsg = MsgV2.createRoot(group, 'post', keys)
rootHash = MsgV2.getMsgHash(rootMsg)
})
let msgHash1
let rec1
let msgHash2
test('feed.publish()', async (t) => {
rec1 = await p(peer.db.feed.publish)({
group,
type: 'post',
data: { text: 'I am 1st post' },
})
t.equal(rec1.msg.data.text, 'I am 1st post', 'msg1 text correct')
t.equal(
rec1.msg.metadata.tangles[rootHash].depth,
1,
'msg1 tangle depth correct'
)
t.deepEquals(
rec1.msg.metadata.tangles[rootHash].prev,
[rootHash],
'msg1 tangle prev correct'
)
msgHash1 = MsgV2.getMsgHash(rec1.msg)
const rec2 = await p(peer.db.feed.publish)({
group,
type: 'post',
data: { text: 'I am 2nd post' },
})
t.equal(rec2.msg.data.text, 'I am 2nd post', 'msg2 text correct')
t.equal(
rec2.msg.metadata.tangles[rootHash].depth,
2,
'msg2 tangle depth correct'
)
t.deepEquals(
rec2.msg.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2 tangle prev correct'
)
msgHash2 = MsgV2.getMsgHash(rec2.msg)
})
test('add() forked then feed.publish() merged', async (t) => {
const tangle = new MsgV2.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
tangle.add(rec1.hash, rec1.msg)
const msg3 = MsgV2.create({
keys,
group,
groupTips: [group],
type: 'post',
data: { text: '3rd post forked from 1st' },
tangles: {
[rootHash]: tangle,
},
})
const rec3 = await p(peer.db.add)(msg3, rootHash)
const msgHash3 = MsgV2.getMsgHash(rec3.msg)
const rec4 = await p(peer.db.feed.publish)({
group,
type: 'post',
data: { text: 'I am 4th post' },
})
t.ok(rec4, '4th post published')
t.equals(
rec4.msg.metadata.tangles[rootHash].prev.length,
3,
'msg4 prev has 3' // is root, msg2 and msg3'
)
t.true(
rec4.msg.metadata.tangles[rootHash].prev.includes(rootHash),
'msg4 prev has root'
)
t.true(
rec4.msg.metadata.tangles[rootHash].prev.includes(msgHash2),
'msg4 prev has msg2'
)
t.true(
rec4.msg.metadata.tangles[rootHash].prev.includes(msgHash3),
'msg4 prev has msg3'
)
})
test('feed.publish() encrypted with box', async (t) => {
const recEncrypted = await p(peer.db.feed.publish)({
group,
type: 'post',
data: { text: 'I am chewing food', recps: [peer.id] },
encryptionFormat: 'box',
})
t.equal(typeof recEncrypted.msg.data, 'string')
t.true(recEncrypted.msg.data.endsWith('.box'), '.box')
const msgDecrypted = peer.db.get(recEncrypted.hash)
t.equals(msgDecrypted.data.text, 'I am chewing food')
})
test('feed.publish() with tangles', async (t) => {
const recA = await p(peer.db.feed.publish)({
group,
type: 'comment',
data: { text: 'I am root' },
})
t.equal(recA.msg.data.text, 'I am root', 'root text correct')
const recB = await p(peer.db.feed.publish)({
group,
type: 'comment',
data: { text: 'I am comment 1' },
tangles: [recA.hash],
keys: bobKeys,
})
t.equal(recB.msg.metadata.tangles[recA.hash].depth, 1, 'tangle depth 1')
t.deepEquals(
recB.msg.metadata.tangles[recA.hash].prev,
[recA.hash],
'tangle prev'
)
})
test('teardown', (t) => {
peer.close(t.end)
})

View File

@ -13,6 +13,7 @@ rimraf.sync(DIR)
const keys = generateKeypair('alice')
let peer
let group
let msgHash1
let msgId1
test('setup', async (t) => {
@ -23,9 +24,12 @@ test('setup', async (t) => {
await peer.db.loaded()
const rec1 = await p(peer.db.create)({
group = (await p(peer.db.group.create)(null)).hash
const rec1 = await p(peer.db.feed.publish)({
group,
type: 'post',
content: { text: 'I am 1st post' },
data: { text: 'I am 1st post' },
})
msgHash1 = FeedV1.getMsgHash(rec1.msg)
msgId1 = FeedV1.getMsgId(rec1.msg)
@ -34,13 +38,13 @@ test('setup', async (t) => {
test('get() supports ppppp URIs', async (t) => {
const msg = peer.db.get(msgId1)
t.ok(msg, 'msg exists')
t.equals(msg.content.text, 'I am 1st post')
t.equals(msg.data.text, 'I am 1st post')
})
test('get() supports msg hashes', async (t) => {
const msg = peer.db.get(msgHash1)
t.ok(msg, 'msg exists')
t.equals(msg.content.text, 'I am 1st post')
t.equals(msg.data.text, 'I am 1st post')
})
test('teardown', (t) => {

View File

@ -25,31 +25,36 @@ test('setup', async (t) => {
await peer.db.loaded()
// Slow down append so that we can create msgs in parallel
const group = (await p(peer.db.group.create)(null)).hash
// Slow down append so that we can trigger msg creation in parallel
const originalAppend = peer.db._getLog().append
peer.db._getLog().append = function (...args) {
setTimeout(originalAppend, 20, ...args)
}
rootPost = (
await p(peer.db.create)({
await p(peer.db.feed.publish)({
group,
keys: keysA,
type: 'comment',
content: { text: 'root' },
data: { text: 'root' },
})
).hash
const [{ hash: reply1B }, { hash: reply1C }] = await Promise.all([
p(peer.db.create)({
p(peer.db.feed.publish)({
group,
keys: keysB,
type: 'comment',
content: { text: 'reply 1' },
data: { text: 'reply 1B' },
tangles: [rootPost],
}),
p(peer.db.create)({
p(peer.db.feed.publish)({
group,
keys: keysC,
type: 'comment',
content: { text: 'reply 1' },
data: { text: 'reply 1C' },
tangles: [rootPost],
}),
])
@ -57,25 +62,28 @@ test('setup', async (t) => {
reply1Hi = reply1B.localeCompare(reply1C) < 0 ? reply1C : reply1B
reply2A = (
await p(peer.db.create)({
await p(peer.db.feed.publish)({
group,
keys: keysA,
type: 'comment',
content: { text: 'reply 2' },
data: { text: 'reply 2' },
tangles: [rootPost],
})
).hash
const [{ hash: reply3B }, { hash: reply3C }] = await Promise.all([
p(peer.db.create)({
p(peer.db.feed.publish)({
group,
keys: keysB,
type: 'comment',
content: { text: 'reply 3' },
data: { text: 'reply 3B' },
tangles: [rootPost],
}),
p(peer.db.create)({
p(peer.db.feed.publish)({
group,
keys: keysC,
type: 'comment',
content: { text: 'reply 3' },
data: { text: 'reply 3C' },
tangles: [rootPost],
}),
])

271
test/msg-v2/create.test.js Normal file
View File

@ -0,0 +1,271 @@
const tape = require('tape')
const MsgV2 = require('../../lib/msg-v2')
const { generateKeypair } = require('../util')
let group
tape('MsgV2.createGroup()', (t) => {
const keys = generateKeypair('alice')
const groupMsg0 = MsgV2.createGroup(keys, 'MYNONCE')
console.log(JSON.stringify(groupMsg0, null, 2))
t.equals(groupMsg0.data.add, keys.id, 'data.add')
t.equals(groupMsg0.metadata.dataHash, 'THi3VkJeaf8aTkLSNJUdFD', 'hash')
t.equals(groupMsg0.metadata.dataSize, 72, 'size')
t.equals(groupMsg0.metadata.group, null, 'group')
t.equals(groupMsg0.metadata.groupTips, null, 'groupTips')
t.deepEquals(groupMsg0.metadata.tangles, {}, 'tangles')
t.equals(groupMsg0.metadata.type, 'group', 'type')
t.equals(groupMsg0.metadata.v, 2, 'v')
t.equals(groupMsg0.pubkey, keys.id, 'pubkey')
group = MsgV2.getMsgHash(groupMsg0)
t.equals(group, 'XKKmEBmqKGa5twQ2HNSk7t', 'group ID')
t.end()
})
let rootMsg = null
let rootHash = null
tape('MsgV2.createRoot()', (t) => {
const keys = generateKeypair('alice')
rootMsg = MsgV2.createRoot(group, 'post', keys)
console.log(JSON.stringify(rootMsg, null, 2))
t.equals(rootMsg.data, null, 'data')
t.equals(rootMsg.metadata.dataHash, null, 'hash')
t.equals(rootMsg.metadata.dataSize, 0, 'size')
t.equals(rootMsg.metadata.group, group, 'group')
t.equals(rootMsg.metadata.groupTips, null, 'groupTips')
t.deepEquals(rootMsg.metadata.tangles, {}, 'tangles')
t.equals(rootMsg.metadata.type, 'post', 'type')
t.equals(rootMsg.metadata.v, 2, 'v')
t.equals(rootMsg.pubkey, keys.id, 'pubkey')
rootHash = MsgV2.getMsgHash(rootMsg)
t.equals(rootHash, 'PzuT1Dwbbgn6a8NeLuHuKw', 'root hash')
t.end()
})
tape('MsgV2.create()', (t) => {
const keys = generateKeypair('alice')
const data = { text: 'Hello world!' }
const tangle1 = new MsgV2.Tangle(rootHash)
tangle1.add(rootHash, rootMsg)
const msg1 = MsgV2.create({
keys,
data,
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle1,
},
})
console.log(JSON.stringify(msg1, null, 2))
t.deepEqual(msg1.data, data, 'data')
t.deepEquals(
Object.keys(msg1.metadata),
['dataHash', 'dataSize', 'group', 'groupTips', 'tangles', 'type', 'v'],
'metadata shape'
)
t.deepEquals(
msg1.metadata.dataHash,
'9R7XmBhHF5ooPg34j9TQcz',
'metadata.dataHash'
)
t.deepEquals(msg1.metadata.dataSize, 23, 'metadata.dataSize')
t.equals(msg1.metadata.group, group, 'metadata.group')
t.deepEquals(msg1.metadata.groupTips, [group], 'metadata.groupTips')
t.deepEquals(
Object.keys(msg1.metadata.tangles),
[rootHash],
'metadata.tangles'
)
t.equals(msg1.metadata.tangles[rootHash].depth, 1, 'tangle depth')
t.deepEquals(msg1.metadata.tangles[rootHash].prev, [rootHash], 'tangle prev')
t.equals(msg1.metadata.type, 'post', 'metadata.type')
t.deepEquals(msg1.metadata.v, 2, 'metadata.v')
t.equals(
msg1.pubkey,
'4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW',
'pubkey'
)
t.equals(
msg1.sig,
'CW8gWiiqtEgPQ2NjXWHJb5aeW4vkKMG9d1BqPJDjSJaw6xX6s5GUTvoobNSBtaLv8CKNXHHJXSr9Vbe7Cew9pkv',
'sig'
)
const msgHash1 = '7miH6Zh63cyMJTT5bhDjZF'
t.equals(
MsgV2.getMsgId(msg1),
`ppppp:message/v2/${group}/post/${msgHash1}`,
'getMsgId'
)
const tangle2 = new MsgV2.Tangle(rootHash)
tangle2.add(rootHash, rootMsg)
tangle2.add(msgHash1, msg1)
const data2 = { text: 'Ola mundo!' }
const msg2 = MsgV2.create({
keys,
data: data2,
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle2,
},
})
console.log(JSON.stringify(msg2, null, 2))
t.deepEqual(msg2.data, data2, 'data')
t.deepEquals(
Object.keys(msg2.metadata),
['dataHash', 'dataSize', 'group', 'groupTips', 'tangles', 'type', 'v'],
'metadata shape'
)
t.deepEquals(
msg2.metadata.dataHash,
'XuZEzH1Dhy1yuRMcviBBcN',
'metadata.dataHash'
)
t.deepEquals(msg2.metadata.dataSize, 21, 'metadata.dataSize')
t.equals(msg2.metadata.group, group, 'metadata.group')
t.deepEquals(msg2.metadata.groupTips, [group], 'metadata.groupTips')
t.deepEquals(
Object.keys(msg2.metadata.tangles),
[rootHash],
'metadata.tangles'
)
t.equals(msg2.metadata.tangles[rootHash].depth, 2, 'tangle depth')
t.deepEquals(msg2.metadata.tangles[rootHash].prev, [msgHash1], 'tangle prev')
t.equals(msg2.metadata.type, 'post', 'metadata.type')
t.deepEquals(msg2.metadata.v, 2, 'metadata.v')
t.equals(
msg2.pubkey,
'4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW',
'pubkey'
)
t.equals(
msg2.sig,
'33PStdQ8kdvL1pSpd6x9LuxcpEvDmsRNhAq7t75v66cthSHHuiJVqp57b9J7QVXp7a1Jw5qaZLycYQspJRbKNWyW',
'sig'
)
t.deepEqual(
MsgV2.getMsgId(msg2),
`ppppp:message/v2/${group}/post/HTtEmjCBXGBRTMM3mgekWu`,
'getMsgId'
)
t.end()
})
tape('create() handles DAG tips correctly', (t) => {
const keys = generateKeypair('alice')
const tangle = new MsgV2.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = MsgV2.create({
keys,
data: { text: '1' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = MsgV2.getMsgHash(msg1)
t.deepEquals(
msg1.metadata.tangles[rootHash].prev,
[MsgV2.getFeedRootHash(group, 'post')],
'msg1.prev is root'
)
tangle.add(msgHash1, msg1)
const msg2A = MsgV2.create({
keys,
data: { text: '2A' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
t.deepEquals(
msg2A.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2A.prev is msg1'
)
const msg2B = MsgV2.create({
keys,
data: { text: '2B' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash2B = MsgV2.getMsgHash(msg2B)
t.deepEquals(
msg2B.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2B.prev is msg1'
)
tangle.add(msgHash2B, msg2B)
const msg3 = MsgV2.create({
keys,
data: { text: '3' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash3 = MsgV2.getMsgHash(msg3)
t.deepEquals(
msg3.metadata.tangles[rootHash].prev,
[rootHash, msgHash2B].sort(),
'msg3.prev is [root(lipmaa),msg2B(previous)], sorted'
)
tangle.add(msgHash3, msg3)
const msgHash2A = MsgV2.getMsgHash(msg2A)
tangle.add(msgHash2A, msg2A)
t.pass('msg2A comes into awareness')
const msg4 = MsgV2.create({
keys,
data: { text: '4' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
t.deepEquals(
msg4.metadata.tangles[rootHash].prev,
[msgHash3, msgHash2A].sort(),
'msg4.prev is [msg3(previous),msg2A(old fork as tip)], sorted'
)
t.end()
})

View File

@ -0,0 +1,344 @@
const tape = require('tape')
const base58 = require('bs58')
const MsgV2 = require('../../lib/msg-v2')
const { generateKeypair } = require('../util')
const keys = generateKeypair('alice')
const group = MsgV2.getMsgHash(MsgV2.createGroup(keys, 'MYNONCE'))
const pubkeys = new Set([keys.id])
tape('invalid msg with non-array prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = MsgV2.createRoot(group, 'post', keys)
const rootHash = MsgV2.getMsgHash(rootMsg)
const tangle = new MsgV2.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg = MsgV2.create({
keys,
data: { text: 'Hello world!' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
msg.metadata.tangles[rootHash].prev = null
const msgHash = MsgV2.getMsgHash(msg)
const err = MsgV2.validate(msg, tangle, pubkeys, msgHash, rootHash)
t.ok(err, 'invalid 2nd msg throws')
t.match(
err,
/prev ".*" should have been an array/,
'invalid 2nd msg description'
)
t.end()
})
tape('invalid msg with bad prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = MsgV2.createRoot(group, 'post', keys)
const rootHash = MsgV2.getMsgHash(rootMsg)
const tangle = new MsgV2.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = MsgV2.create({
keys,
data: { text: 'Hello world!' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = MsgV2.getMsgHash(msg1)
tangle.add(msgHash1, msg1)
const msg2 = MsgV2.create({
keys,
data: { text: 'Hello world!' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
msg2.metadata.tangles[rootHash].depth = 1
msg2.metadata.tangles[rootHash].prev = [1234]
const msgHash2 = MsgV2.getMsgHash(msg2)
const err = MsgV2.validate(msg2, tangle, pubkeys, msgHash2, rootHash)
t.ok(err, 'invalid 2nd msg throws')
t.match(
err,
/prev item ".*" should have been a string/,
'invalid 2nd msg description'
)
t.end()
})
tape('invalid msg with URI in prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = MsgV2.createRoot(group, 'post', keys)
const rootHash = MsgV2.getMsgHash(rootMsg)
const tangle = new MsgV2.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = MsgV2.create({
keys,
data: { text: 'Hello world!' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = MsgV2.getMsgHash(msg1)
tangle.add(msgHash1, msg1)
const msg2 = MsgV2.create({
keys,
data: { text: 'Hello world!' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash2 = MsgV2.getMsgHash(msg2)
const randBuf = Buffer.alloc(16).fill(16)
const fakeMsgKey1 = `ppppp:message/v2/${base58.encode(randBuf)}`
msg2.metadata.tangles[rootHash].depth = 1
msg2.metadata.tangles[rootHash].prev = [fakeMsgKey1]
const err = MsgV2.validate(msg2, tangle, pubkeys, msgHash2, rootHash)
t.ok(err, 'invalid 2nd msg throws')
t.match(err, /prev item ".*" is a URI/, 'invalid 2nd msg description')
t.end()
})
tape('invalid msg with unknown prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = MsgV2.createRoot(group, 'post', keys)
const rootHash = MsgV2.getMsgHash(rootMsg)
const tangle = new MsgV2.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = MsgV2.create({
keys,
data: { text: 'Hello world!' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = MsgV2.getMsgHash(msg1)
tangle.add(msgHash1, msg1)
const unknownMsg = MsgV2.create({
keys,
data: { text: 'Alien' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const unknownMsgHash = MsgV2.getMsgHash(unknownMsg)
const fakeRootHash = 'ABCDEabcde' + rootHash.substring(10)
const tangle2 = new MsgV2.Tangle(fakeRootHash)
tangle2.add(fakeRootHash, rootMsg)
tangle2.add(unknownMsgHash, unknownMsg)
const msg2 = MsgV2.create({
keys,
data: { text: 'Hello world!' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle2,
},
})
const msgHash2 = MsgV2.getMsgHash(msg2)
const err = MsgV2.validate(msg2, tangle, pubkeys, msgHash2, rootHash)
t.ok(err, 'invalid 2nd msg throws')
t.match(err, /all prev are locally unknown/, 'invalid 2nd msg description')
t.end()
})
tape('invalid feed msg with a different pubkey', (t) => {
const keysA = generateKeypair('alice')
const keysB = generateKeypair('bob')
const groupB = MsgV2.getMsgHash(MsgV2.createGroup(keysB, 'MYNONCE'))
const rootMsg = MsgV2.createRoot(group, 'post', keys)
const rootHash = MsgV2.getMsgHash(rootMsg)
const feedTangle = new MsgV2.Tangle(rootHash)
feedTangle.add(rootHash, rootMsg)
const msg = MsgV2.create({
keys: keysB,
data: { text: 'Hello world!' },
group: groupB,
groupTips: [groupB],
type: 'post',
tangles: {
[rootHash]: feedTangle,
},
})
const msgHash = MsgV2.getMsgHash(msg)
const err = MsgV2.validate(msg, feedTangle, pubkeys, msgHash, rootHash)
t.match(
err,
/pubkey ".*" should have been one of ".*" from the group ".*"/,
'invalid msg'
)
t.end()
})
tape('invalid feed msg with a different type', (t) => {
const keysA = generateKeypair('alice')
const rootMsg = MsgV2.createRoot(group, 'post', keys)
const rootHash = MsgV2.getMsgHash(rootMsg)
const feedTangle = new MsgV2.Tangle(rootHash)
feedTangle.add(rootHash, rootMsg)
const msg = MsgV2.create({
keys: keysA,
data: { text: 'Hello world!' },
group,
groupTips: [group],
type: 'comment',
tangles: {
[rootHash]: feedTangle,
},
})
const msgHash = MsgV2.getMsgHash(msg)
const err = MsgV2.validate(msg, feedTangle, pubkeys, msgHash, rootHash)
t.match(
err,
/type "comment" should have been feed type "post"/,
'invalid feed msg'
)
t.end()
})
tape('invalid feed msg with non-alphabetical prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = MsgV2.createRoot(group, 'post', keys)
const rootHash = MsgV2.getMsgHash(rootMsg)
const tangle = new MsgV2.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = MsgV2.create({
keys,
data: { text: '1' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = MsgV2.getMsgHash(msg1)
const msg2 = MsgV2.create({
keys,
data: { text: '2' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash2 = MsgV2.getMsgHash(msg2)
tangle.add(msgHash1, msg1)
tangle.add(msgHash2, msg2)
const msg3 = MsgV2.create({
keys,
data: { text: '3' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash3 = MsgV2.getMsgHash(msg3)
let prevHashes = msg3.metadata.tangles[rootHash].prev
if (prevHashes[0] < prevHashes[1]) {
prevHashes = [prevHashes[1], prevHashes[0]]
} else {
prevHashes = [prevHashes[0], prevHashes[1]]
}
msg3.metadata.tangles[rootHash].prev = prevHashes
const err = MsgV2.validate(msg3, tangle, pubkeys, msgHash3, rootHash)
t.ok(err, 'invalid 3rd msg throws')
t.match(
err,
/prev ".*" should have been alphabetically sorted/,
'invalid error message'
)
t.end()
})
tape('invalid feed msg with duplicate prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = MsgV2.createRoot(group, 'post', keys)
const rootHash = MsgV2.getMsgHash(rootMsg)
const tangle = new MsgV2.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = MsgV2.create({
keys,
data: { text: '1' },
group,
groupTips: [group],
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = MsgV2.getMsgHash(msg1)
const [prevHash] = msg1.metadata.tangles[rootHash].prev
msg1.metadata.tangles[rootHash].prev = [prevHash, prevHash]
const err = MsgV2.validate(msg1, tangle, pubkeys, msgHash1, rootHash)
t.ok(err, 'invalid 1st msg throws')
t.match(err, /prev ".*" contains duplicates/, 'invalid error message')
t.end()
})

View File

@ -0,0 +1,89 @@
const tape = require('tape')
const MsgV2 = require('../../lib/msg-v2')
const { generateKeypair } = require('../util')
// MsgV2.create() must reject a `type` that is not a string at all
tape('invalid type not a string', (t) => {
  const keys = generateKeypair('alice')
  const createWithNumericType = () => {
    MsgV2.create({ keys, data: { text: 'Hello world!' }, type: 123 })
  }
  t.throws(createWithNumericType, /invalid type/, 'not a string')
  t.end()
})
tape('invalid type with "/" character', (t) => {
const keys = generateKeypair('alice')
t.throws(
() => {
MsgV2.create({
keys,
data: { text: 'Hello world!' },
type: 'group/init',
})
},
/invalid type/,
'invalid type if contains /'
)
t.end()
})
tape('invalid type with "*" character', (t) => {
const keys = generateKeypair('alice')
t.throws(
() => {
MsgV2.create({
keys,
data: { text: 'Hello world!' },
type: 'star*',
})
},
/invalid type/,
'invalid type if contains *'
)
t.end()
})
tape('invalid type too short', (t) => {
const keys = generateKeypair('alice')
t.throws(
() => {
MsgV2.create({
keys,
data: { text: 'Hello world!' },
type: 'xy',
})
},
/shorter than 3/,
'invalid type if too short'
)
t.end()
})
tape('invalid type too long', (t) => {
const keys = generateKeypair('alice')
t.throws(
() => {
MsgV2.create({
keys,
data: { text: 'Hello world!' },
type: 'a'.repeat(120),
})
},
/100\+ characters long/,
'invalid type if too long'
)
t.end()
})

View File

@ -18,10 +18,13 @@ test('msgs() iterator', async (t) => {
await peer.db.loaded()
const group = (await p(peer.db.group.create)(null)).hash
for (let i = 0; i < 6; i++) {
await p(peer.db.create)({
await p(peer.db.feed.publish)({
group,
type: i % 2 === 0 ? 'post' : 'about',
content:
data:
i % 2 === 0
? { text: 'hello ' + i }
: { about: peer.id, name: 'Mr. #' + i },
@ -31,9 +34,9 @@ test('msgs() iterator', async (t) => {
const posts = []
const abouts = []
for (const msg of peer.db.msgs()) {
if (!msg.content) continue
if (msg.metadata.type === 'post') posts.push(msg.content.text)
else if (msg.metadata.type === 'about') abouts.push(msg.content.name)
if (!msg.data) continue
if (msg.metadata.type === 'post') posts.push(msg.data.text)
else if (msg.metadata.type === 'about') abouts.push(msg.data.name)
}
t.deepEqual(posts, ['hello 0', 'hello 2', 'hello 4'], 'queried posts')

View File

@ -18,23 +18,27 @@ test('onRecordAdded', async (t) => {
await peer.db.loaded()
const group = (await p(peer.db.group.create)(null)).hash
const listened = []
var remove = peer.db.onRecordAdded((ev) => {
listened.push(ev)
})
const rec1 = await p(peer.db.create)({
const rec1 = await p(peer.db.feed.publish)({
group,
type: 'post',
content: { text: 'I am hungry' },
data: { text: 'I am hungry' },
})
t.equal(rec1.msg.content.text, 'I am hungry', 'msg1 text correct')
t.equal(rec1.msg.data.text, 'I am hungry', 'msg1 text correct')
await p(setTimeout)(500)
t.equal(listened.length, 2)
t.deepEquals(listened[0].msg.content, null, 'root')
t.deepEquals(listened[0].msg.metadata.size, 0, 'root')
t.deepEquals(listened[1], rec1, 'actual record')
t.equal(listened.length, 3)
t.equals(listened[0].msg.metadata.group, null, 'group root')
t.equals(listened[1].msg.data, null, 'root')
t.equals(listened[1].msg.metadata.dataSize, 0, 'root')
t.deepEquals(listened[2], rec1, 'actual record')
remove()
await p(peer.close)(true)

View File

@ -10,7 +10,7 @@ const { generateKeypair } = require('./util')
const DIR = path.join(os.tmpdir(), 'ppppp-db-re-open')
rimraf.sync(DIR)
test('create some msgs, close, re-open', async (t) => {
test('publish some msgs, close, re-open', async (t) => {
const keys = generateKeypair('alice')
const peer = SecretStack({ appKey: caps.shs })
.use(require('../lib'))
@ -18,13 +18,15 @@ test('create some msgs, close, re-open', async (t) => {
.call(null, { keys, path: DIR })
await peer.db.loaded()
const group = (await p(peer.db.group.create)(null)).hash
t.pass('opened db')
const msgHashes = []
for (let i = 0; i < 6; i++) {
const rec = await p(peer.db.create)({
const rec = await p(peer.db.feed.publish)({
group,
type: 'post',
content: { text: 'hello ' + i },
data: { text: 'hello ' + i },
})
msgHashes.push(rec.hash)
}
@ -46,8 +48,8 @@ test('create some msgs, close, re-open', async (t) => {
const texts = []
for (const msg of peer2.db.msgs()) {
if (!msg.content) continue
texts.push(msg.content.text)
if (!msg.data || !msg.metadata.group) continue
texts.push(msg.data.text)
}
t.deepEquals(

View File

@ -17,11 +17,13 @@ test('records() iterator', async (t) => {
.call(null, { keys, path: DIR })
await peer.db.loaded()
const group = (await p(peer.db.group.create)(null)).hash
for (let i = 0; i < 6; i++) {
await p(peer.db.create)({
await p(peer.db.feed.publish)({
group,
type: i % 2 === 0 ? 'post' : 'about',
content:
data:
i % 2 === 0
? { text: 'hello ' + i }
: { about: peer.id, name: 'Mr. #' + i },
@ -30,8 +32,9 @@ test('records() iterator', async (t) => {
let count = 0
for (const rec of peer.db.records()) {
if (!rec.msg.content) continue
t.true(rec.misc.size > rec.msg.metadata.size)
if (!rec.msg.data) continue
if (!rec.msg.metadata.group) continue
t.true(rec.misc.size > rec.msg.metadata.dataSize, 'size > dataSize')
count++
}
t.equals(count, 6)

View File

@ -5,7 +5,7 @@ const base58 = require('bs58')
function generateKeypair(seed) {
const keys = ssbKeys.generate('ed25519', seed, 'buttwoo-v1')
const { data } = SSBURI.decompose(keys.id)
keys.id = `ppppp:feed/v1/${base58.encode(Buffer.from(data, 'base64'))}`
keys.id = base58.encode(Buffer.from(data, 'base64'))
return keys
}