make feed depth/prev a regular tangle

Andre Staltz 2023-04-10 22:08:24 +03:00
parent 3041ffc60a
commit 4e334e242c
17 changed files with 528 additions and 420 deletions
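For orientation, a hedged reading of what this commit changes in the shape of msg.metadata, based on the typedef and test diffs below; the concrete hash, size, and timestamp values are copied from the tests in this commit and are only illustrative:

// Before: depth/prev sat at the top level of metadata, and the first msg
// of a feed had depth 0 and an empty prev.
const metadataBefore = {
  depth: 0,
  prev: [],
  proof: '9R7XmBhHF5ooPg34j9TQcz',
  size: 23,
  type: 'post',
  who: '4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW',
  when: 1652037377204,
}

// After: every feed starts with a separate root msg (createRoot()), and
// depth/prev live under metadata.tangles, keyed by that root msg's hash,
// exactly like any other tangle.
const metadataAfter = {
  proof: '9R7XmBhHF5ooPg34j9TQcz',
  size: 23,
  tangles: {
    'PpkBfa8C4sB8wHrqiNmHqe': { depth: 1, prev: ['PpkBfa8C4sB8wHrqiNmHqe'] },
  },
  type: 'post',
  who: '4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW',
  when: 1652037377204,
}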

View File

@ -12,9 +12,7 @@ const {
validateType, validateType,
validateContent, validateContent,
validate, validate,
validateOOO,
validateBatch, validateBatch,
validateOOOBatch,
validateMsgHash, validateMsgHash,
} = require('./validation') } = require('./validation')
@ -23,7 +21,7 @@ const {
*/ */
/** /**
* @typedef {Object} TangleData * @typedef {Object} TangleMetadata
* @property {number} depth * @property {number} depth
* @property {Array<string>} prev * @property {Array<string>} prev
*/ */
@ -32,80 +30,73 @@ const {
* @typedef {Object} Msg * @typedef {Object} Msg
* @property {*} content * @property {*} content
* @property {Object} metadata * @property {Object} metadata
* @property {number} metadata.depth
* @property {Array<string>} metadata.prev
* @property {string} metadata.proof * @property {string} metadata.proof
* @property {number} metadata.size * @property {number} metadata.size
* @property {Record<string, TangleData>=} metadata.tangles * @property {Record<string, TangleMetadata>} metadata.tangles
* @property {string=} metadata.type * @property {string} metadata.type
* @property {string} metadata.who * @property {string} metadata.who
* @property {number=} metadata.when * @property {number} metadata.when
* @property {string} sig * @property {string} sig
*/ */
/**
* @typedef {Object} Keys
* @property {string} keys.id
* @property {string} keys.private
*/
/** /**
* @typedef {Object} CreateOpts * @typedef {Object} CreateOpts
* @property {*} content * @property {*} content
* @property {string} type * @property {string} type
* @property {number} when * @property {number} when
* @property {Object} keys * @property {Keys} keys
* @property {Record<string, MsgIter>} tangles
*/
/**
* @typedef {Object} CreateRootOpts
* @property {string} type
* @property {Keys} keys
* @property {string} keys.id * @property {string} keys.id
* @property {string} keys.private * @property {string} keys.private
* @property {MsgIter} existing
* @property {Record<string, MsgIter>=} tangles
*/ */
/** /**
* @param {Msg} msg * @param {Msg} msg
*/ */
function getFeedId(msg) { // function getFeedId(msg) {
if (msg.metadata.type) { // if (msg.metadata.type) {
return `ppppp:feed/v1/${msg.metadata.who}/${msg.metadata.type}` // return `ppppp:feed/v1/${msg.metadata.who}/${msg.metadata.type}`
} else { // } else {
return `ppppp:feed/v1/${msg.metadata.who}` // return `ppppp:feed/v1/${msg.metadata.who}`
} // }
} // }
function isMsg(x) { // function isMsg(x) {
return ( // return (
typeof x === 'object' && // typeof x === 'object' &&
!!x && // !!x &&
typeof x.metadata.author === 'string' && // typeof x.metadata.author === 'string' &&
x.metadata.author && // x.metadata.author &&
typeof x.metadata.type === 'string' && // typeof x.metadata.type === 'string' &&
x.metadata.type // x.metadata.type
) // )
} // }
function isFeedId(author) { // function isFeedId(author) {
if (typeof author !== 'string') return false // if (typeof author !== 'string') return false
return author.startsWith('ppppp:feed/v1/') // return author.startsWith('ppppp:feed/v1/')
} // }
function toPlaintextBuffer(opts) { function toPlaintextBuffer(opts) {
return Buffer.from(stringify(opts.content), 'utf8') return Buffer.from(stringify(opts.content), 'utf8')
} }
function readDepth(msg, tangleId = null) {
if (tangleId) {
return msg.metadata.tangles?.[tangleId]?.depth ?? 0
} else {
return msg.metadata.depth
}
}
function readPrev(msg, tangleId = null) {
if (tangleId) {
return msg.metadata.tangles?.[tangleId]?.prev ?? []
} else {
return msg.metadata.prev
}
}
function calculateDepth(existing, tangleId = null) { function calculateDepth(existing, tangleId = null) {
let max = -1 let max = -1
for (const msg of existing.values()) { for (const msg of existing.values()) {
const depth = readDepth(msg, tangleId) const depth = msg.metadata.tangles[tangleId]?.depth ?? 0
if (depth > max) { if (depth > max) {
max = depth max = depth
} }
@ -148,7 +139,7 @@ function determineTips(existing, tangleId = null) {
} }
for (const msg of existing.values()) { for (const msg of existing.values()) {
const prev = readPrev(msg, tangleId) const prev = msg.metadata.tangles[tangleId]?.prev ?? []
for (const p of prev) { for (const p of prev) {
tips.delete(p) tips.delete(p)
} }
@ -160,7 +151,7 @@ function calculatePrev(existing, depth, lipmaaDepth, tangleId = null) {
const prev = [] const prev = []
const tips = determineTips(existing, tangleId) const tips = determineTips(existing, tangleId)
for (const msg of existing.values()) { for (const msg of existing.values()) {
const msgDepth = readDepth(msg, tangleId) const msgDepth = msg.metadata.tangles[tangleId]?.depth ?? 0
const msgHash = getMsgHash(msg) const msgHash = getMsgHash(msg)
if ( if (
msgDepth === depth - 1 || msgDepth === depth - 1 ||
@ -173,7 +164,12 @@ function calculatePrev(existing, depth, lipmaaDepth, tangleId = null) {
return prev return prev
} }
function prevalidateExisting(existing, tangleId = null) { /**
* @param {MsgIter} existing
* @param {string} tangleId
* @returns
*/
function prevalidateExisting(existing, tangleId) {
if (!existing?.[Symbol.iterator]) { if (!existing?.[Symbol.iterator]) {
// prettier-ignore // prettier-ignore
return new Error(`existing must be an iterator, but got ${typeof existing}`) return new Error(`existing must be an iterator, but got ${typeof existing}`)
@ -182,6 +178,10 @@ function prevalidateExisting(existing, tangleId = null) {
// prettier-ignore // prettier-ignore
return new Error(`existing must be a Map, Set, or Array, but got ${existing}`) return new Error(`existing must be a Map, Set, or Array, but got ${existing}`)
} }
if (!tangleId) {
// prettier-ignore
return new Error(`tangleId must be a string, but got ${typeof tangleId}`)
}
let isEmpty = true let isEmpty = true
let hasDepthZeroMsg = false let hasDepthZeroMsg = false
for (const p of existing.values()) { for (const p of existing.values()) {
@ -191,27 +191,18 @@ function prevalidateExisting(existing, tangleId = null) {
return new Error(`existing must contain messages, but got ${typeof p}`) return new Error(`existing must contain messages, but got ${typeof p}`)
} }
if (!tangleId && p.metadata.depth === 0) { if (!p.metadata.tangles[tangleId] && getMsgHash(p) === tangleId) {
if (hasDepthZeroMsg) { if (hasDepthZeroMsg) {
// prettier-ignore // prettier-ignore
return new Error(`existing must contain only 1 message with depth 0`) return new Error(`existing must contain only 1 message with depth 0`)
} else { } else {
hasDepthZeroMsg = true hasDepthZeroMsg = true
} }
} else if (tangleId) { } else if (!p.metadata.tangles[tangleId]) {
if (!p.metadata.tangles?.[tangleId] && getMsgHash(p) === tangleId) {
if (hasDepthZeroMsg) {
// prettier-ignore
return new Error(`existing must contain only 1 message with depth 0`)
} else {
hasDepthZeroMsg = true
}
} else if (!p.metadata.tangles?.[tangleId]) {
// prettier-ignore // prettier-ignore
return new Error(`existing must refer to the tangleId ${tangleId}`) return new Error(`existing must refer to the tangleId ${tangleId}`)
} }
} }
}
if (!isEmpty && !hasDepthZeroMsg) { if (!isEmpty && !hasDepthZeroMsg) {
// prettier-ignore // prettier-ignore
return new Error(`opts.existing must contain the message with depth 0`) return new Error(`opts.existing must contain the message with depth 0`)
@ -225,14 +216,11 @@ function prevalidateExisting(existing, tangleId = null) {
function create(opts) { function create(opts) {
let err let err
if ((err = validateType(opts.type))) throw err if ((err = validateType(opts.type))) throw err
if ((err = prevalidateExisting(opts.existing))) throw err if (!opts.tangles) throw new Error('opts.tangles is required')
const [proof, size] = representContent(opts.content) const [proof, size] = representContent(opts.content)
const depth = calculateDepth(opts.existing)
const lipmaaDepth = lipmaa(depth + 1) - 1
const prev = calculatePrev(opts.existing, depth, lipmaaDepth)
let tangles = null const tangles = {}
if (opts.tangles) { if (opts.tangles) {
for (const rootId in opts.tangles) { for (const rootId in opts.tangles) {
if ((err = validateMsgHash(rootId))) throw err if ((err = validateMsgHash(rootId))) throw err
@ -242,19 +230,19 @@ function create(opts) {
const depth = calculateDepth(existing, rootId) const depth = calculateDepth(existing, rootId)
const lipmaaDepth = lipmaa(depth + 1) - 1 const lipmaaDepth = lipmaa(depth + 1) - 1
const prev = calculatePrev(existing, depth, lipmaaDepth, rootId) const prev = calculatePrev(existing, depth, lipmaaDepth, rootId)
tangles ??= {}
tangles[rootId] = { depth, prev } tangles[rootId] = { depth, prev }
} }
} else {
// prettier-ignore
throw new Error(`cannot create msg without tangles, that's the case for createRoot()`)
} }
const msg = { const msg = {
content: opts.content, content: opts.content,
metadata: { metadata: {
depth,
prev,
proof, proof,
size, size,
...(tangles ? { tangles } : null), tangles,
type: opts.type, type: opts.type,
who: stripAuthor(opts.keys.id), who: stripAuthor(opts.keys.id),
when: +opts.when, when: +opts.when,
@ -273,6 +261,38 @@ function create(opts) {
return msg return msg
} }
/**
* @param {Keys} keys
* @param {string} type
* @returns {Msg}
*/
function createRoot(keys, type) {
let err
if ((err = validateType(type))) throw err
const msg = {
content: null,
metadata: {
proof: '',
size: 0,
tangles: {},
type,
who: stripAuthor(keys.id),
when: 0,
},
sig: '',
}
const privateKey = Buffer.from(keys.private, 'base64')
// TODO: add a label prefix to the metadata before signing
const metadataBuf = Buffer.from(stringify(msg.metadata), 'utf8')
// TODO: when signing, what's the point of a customizable hmac?
const sigBuf = ed25519.sign(privateKey, metadataBuf)
msg.sig = base58.encode(sigBuf)
return msg
}
/** /**
* @param {Buffer} plaintextBuf * @param {Buffer} plaintextBuf
* @param {Msg} msg * @param {Msg} msg
@ -285,14 +305,14 @@ function fromPlaintextBuffer(plaintextBuf, msg) {
module.exports = { module.exports = {
getMsgHash, getMsgHash,
getMsgId, getMsgId,
getFeedId, // getFeedId,
isFeedId, // isFeedId,
isMsg, // isMsg,
create, create,
createRoot,
stripAuthor,
toPlaintextBuffer, toPlaintextBuffer,
fromPlaintextBuffer, fromPlaintextBuffer,
validate, validate,
validateOOO,
validateBatch, validateBatch,
validateOOOBatch,
} }
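To show how the reworked FeedV1 API fits together, here is a minimal usage sketch that mirrors the tests later in this commit; the require path and the keys variable are assumptions standing in for the caller's setup (the tests use a generateKeypair helper):

const FeedV1 = require('./lib/feed-v1')
// keys = { id, private }, e.g. from the tests' generateKeypair helper (assumed)

// Every feed now begins with an explicit root msg: content is null, when is 0,
// and its hash doubles as the feed's tangle id.
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)

// Subsequent msgs declare the feed root as a tangle and pass the msgs already
// known for that tangle, so create() can calculate depth and prev.
const msg1 = FeedV1.create({
  keys,
  type: 'post',
  content: { text: 'Hello world!' },
  when: Date.now(),
  tangles: {
    [rootHash]: new Map([[rootHash, rootMsg]]),
  },
})

// Validation is now per tangle: it takes the msg, the known msgs of that
// tangle, the msg's own hash, and the tangle's root hash.
const msgHash1 = FeedV1.getMsgHash(msg1)
FeedV1.validate(msg1, new Map([[rootHash, rootMsg]]), msgHash1, rootHash, (err) => {
  if (err) throw err
})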

View File

@ -12,11 +12,8 @@ function validateShape(msg) {
if (typeof msg.metadata.who === 'undefined') { if (typeof msg.metadata.who === 'undefined') {
return new Error('invalid message: must have metadata.who') return new Error('invalid message: must have metadata.who')
} }
if (typeof msg.metadata.depth === 'undefined') { if (typeof msg.metadata.tangles !== 'object') {
return new Error('invalid message: must have metadata.depth') return new Error('invalid message: must have metadata.tangles')
}
if (typeof msg.metadata.prev === 'undefined') {
return new Error('invalid message: must have metadata.prev')
} }
if (typeof msg.metadata.proof === 'undefined') { if (typeof msg.metadata.proof === 'undefined') {
return new Error('invalid message: must have metadata.proof') return new Error('invalid message: must have metadata.proof')
@ -45,7 +42,9 @@ function validateMsgHash(str) {
try { try {
base58.decode(str) base58.decode(str)
} catch (err) { } catch (err) {
return new Error(`invalid message: msgHash ${str} should have been a base58 string`) return new Error(
`invalid message: msgHash ${str} should have been a base58 string`
)
} }
} }
@ -74,12 +73,13 @@ function validateSignature(msg) {
} }
} }
function validatePrev(msg, existingMsgs) { function validateTangle(msg, existingMsgs, tangleId) {
if (!msg.metadata.prev || !msg.metadata.prev[Symbol.iterator]) { const tangle = msg.metadata.tangles[tangleId]
if (!tangle?.prev || !Array.isArray(tangle.prev)) {
// prettier-ignore // prettier-ignore
return new Error('invalid message: prev must be an iterator, on feed: ' + msg.metadata.who); return new Error('invalid message: prev must be an array, on feed: ' + msg.metadata.who);
} }
for (const p of msg.metadata.prev) { for (const p of tangle.prev) {
if (typeof p !== 'string') { if (typeof p !== 'string') {
// prettier-ignore // prettier-ignore
return new Error('invalid message: prev must contain strings but found ' + p + ', on feed: ' + msg.metadata.who); return new Error('invalid message: prev must contain strings but found ' + p + ', on feed: ' + msg.metadata.who);
@ -103,21 +103,18 @@ function validatePrev(msg, existingMsgs) {
// prettier-ignore // prettier-ignore
return new Error('invalid message: prev ' + p + ' is not from the same type, on feed: ' + msg.metadata.who); return new Error('invalid message: prev ' + p + ' is not from the same type, on feed: ' + msg.metadata.who);
} }
if (existingMsg.metadata.depth >= msg.metadata.depth) { const existingDepth = existingMsg.metadata.tangles[tangleId]?.depth ?? 0
if (existingDepth >= tangle.depth) {
// prettier-ignore // prettier-ignore
return new Error('invalid message: depth of prev ' + p + ' is not lower, on feed: ' + msg.metadata.who); return new Error('invalid message: depth of prev ' + p + ' is not lower, on feed: ' + msg.metadata.who);
} }
} }
} }
function validateFirstPrev(msg) { function validateTangleRoot(msg, tangleId) {
if (!Array.isArray(msg.metadata.prev)) { if (msg.metadata.tangles[tangleId]) {
// prettier-ignore // prettier-ignore
return new Error('invalid message: prev must be an array, on feed: ' + msg.metadata.who); return new Error('invalid message: tangle root must not have self tangle data, on feed: ' + msg.metadata.who);
}
if (msg.metadata.prev.length !== 0) {
// prettier-ignore
return new Error('invalid message: prev of 1st msg must be an empty array, on feed: ' + msg.metadata.who);
} }
} }
@ -152,7 +149,7 @@ function validateContent(msg) {
// FIXME: if content does not exist, do nothing // FIXME: if content does not exist, do nothing
const { content } = msg const { content } = msg
if (!content) { if (!content) {
return new Error('invalid message: must have content') return
} }
if (Array.isArray(content)) { if (Array.isArray(content)) {
return new Error('invalid message: content must not be an array') return new Error('invalid message: content must not be an array')
@ -165,48 +162,28 @@ function validateContent(msg) {
// FIXME: validateDepth should be +1 of the max of prev depth // FIXME: validateDepth should be +1 of the max of prev depth
function validateSync(msg, existingMsgs) { function validateSync(msg, existingMsgs, msgHash, rootHash) {
let err let err
if ((err = validateShape(msg))) return err if ((err = validateShape(msg))) return err
if ((err = validateWho(msg))) return err if ((err = validateWho(msg))) return err
if ((err = validateWhen(msg))) return err if ((err = validateWhen(msg))) return err
if (msg.metadata.depth === 0) { if (msgHash === rootHash) {
if ((err = validateFirstPrev(msg))) return err if ((err = validateTangleRoot(msg))) return err
} else { } else {
if ((err = validatePrev(msg, existingMsgs))) return err if ((err = validateTangle(msg, existingMsgs, rootHash))) return err
} }
if ((err = validateContent(msg))) return err if ((err = validateContent(msg))) return err
if ((err = validateSignature(msg))) return err if ((err = validateSignature(msg))) return err
} }
// function validateOOOSync(nativeMsg, hmacKey) { function validate(msg, existingMsgs, msgHash, rootHash, cb) {
// let err
// if ((err = validateShape(nativeMsg))) return err
// if ((err = validateHmac(hmacKey))) return err
// if ((err = validateAuthor(nativeMsg))) return err
// if ((err = validateHash(nativeMsg))) return err
// if ((err = validateOrder(nativeMsg))) return err
// if ((err = validateContent(nativeMsg))) return err
// if ((err = validateAsJSON(nativeMsg))) return err
// if ((err = validateSignature(nativeMsg, hmacKey))) return err
// }
function validate(msg, existingMsgs, cb) {
let err let err
if ((err = validateSync(msg, existingMsgs))) { if ((err = validateSync(msg, existingMsgs, msgHash, rootHash))) {
return cb(err) return cb(err)
} }
cb() cb()
} }
// function validateOOO(nativeMsg, hmacKey, cb) {
// let err
// if ((err = validateOOOSync(nativeMsg, hmacKey))) {
// return cb(err)
// }
// cb()
// }
// function validateBatch(nativeMsgs, prevNativeMsg, hmacKey, cb) { // function validateBatch(nativeMsgs, prevNativeMsg, hmacKey, cb) {
// let err // let err
// let prev = prevNativeMsg // let prev = prevNativeMsg
@ -218,15 +195,6 @@ function validate(msg, existingMsgs, cb) {
// cb() // cb()
// } // }
// function validateOOOBatch(nativeMsgs, hmacKey, cb) {
// let err
// for (const nativeMsg of nativeMsgs) {
// err = validateOOOSync(nativeMsg, hmacKey)
// if (err) return cb(err)
// }
// cb()
// }
module.exports = { module.exports = {
validateType, validateType,
validateContent, validateContent,
@ -234,6 +202,4 @@ module.exports = {
validate, validate,
validateMsgHash, validateMsgHash,
// validateBatch, // validateBatch,
// validateOOO,
// validateOOOBatch,
} }

View File

@ -4,7 +4,7 @@ const AAOL = require('async-append-only-log')
const promisify = require('promisify-4loc') const promisify = require('promisify-4loc')
const Obz = require('obz') const Obz = require('obz')
const FeedV1 = require('./feed-v1') const FeedV1 = require('./feed-v1')
const { ReadyGate } = require('./utils') const { ReadyGate, isEmptyObject } = require('./utils')
const { decrypt } = require('./encryption') const { decrypt } = require('./encryption')
/** /**
@ -48,33 +48,6 @@ exports.init = function initDB(peer, config) {
const encryptionFormats = new Map() const encryptionFormats = new Map()
const onRecordAdded = Obz() const onRecordAdded = Obz()
const msgsPerFeed = {
_mapAll: new Map(), // who => Set<MsgHash>
_byHash: new Map(), // msgId => Msg // TODO: optimize space usage of this??
update(msg, msgHash) {
const feedId = FeedV1.getFeedId(msg)
const setAll = this._mapAll.get(feedId) ?? new Set()
setAll.add(msgHash)
this._mapAll.set(feedId, setAll)
this._byHash.set(msgHash, msg)
},
getAll(feedId) {
const map = new Map()
for (const msgHash of this._mapAll.get(feedId) ?? []) {
const msg = this._byHash.get(msgHash)
if (msg) map.set(msgHash, msg)
}
return map
},
deleteMsg(msg) {
const feedId = FeedV1.getFeedId(msg)
const msgHash = FeedV1.getMsgHash(msg)
const setAll = this._mapAll.get(feedId)
setAll.delete(msgHash)
this._byHash.delete(msgHash)
},
}
const log = AAOL(path.join(config.path, 'db.bin'), { const log = AAOL(path.join(config.path, 'db.bin'), {
cacheSize: 1, cacheSize: 1,
blockSize: 64 * 1024, blockSize: 64 * 1024,
@ -124,8 +97,6 @@ exports.init = function initDB(peer, config) {
rec.misc.size = size rec.misc.size = size
rec.misc.seq = i rec.misc.seq = i
recs.push(rec) recs.push(rec)
msgsPerFeed.update(rec.msg)
}, },
function drainEnd(err) { function drainEnd(err) {
// prettier-ignore // prettier-ignore
@ -175,20 +146,17 @@ exports.init = function initDB(peer, config) {
return encryptionFormat return encryptionFormat
} }
function add(msg, cb) { function add(msg, tangleRootHash, cb) {
const feedId = FeedV1.getFeedId(msg)
// TODO: optimize this. This may be slow if you're adding many msgs in a // TODO: optimize this. This may be slow if you're adding many msgs in a
// row, because `getAll()` creates a new Map() each time. // row, because it creates a new Map() each time.
const existingMsgs = msgsPerFeed.getAll(feedId) const tangleMsgs = populateTangle(tangleRootHash)
FeedV1.validate(msg, existingMsgs, validationCB) const msgHash = FeedV1.getMsgHash(msg)
FeedV1.validate(msg, tangleMsgs, msgHash, tangleRootHash, validationCB)
function validationCB(err) { function validationCB(err) {
// prettier-ignore // prettier-ignore
if (err) return cb(new Error('add() failed validation for feed format v1', {cause: err})) if (err) return cb(new Error('add() failed validation for feed format v1', {cause: err}))
const msgHash = FeedV1.getMsgHash(msg)
msgsPerFeed.update(msg, msgHash)
logAppend(msgHash, msg, logAppendCB) logAppend(msgHash, msg, logAppendCB)
} }
@ -199,6 +167,16 @@ exports.init = function initDB(peer, config) {
} }
} }
function getFeedRoot(findWho, findType) {
for (const rec of records()) {
const { who, type, tangles } = rec.msg.metadata
if (who === findWho && type === findType && isEmptyObject(tangles)) {
return rec.hash
}
}
return null
}
function populateTangle(tangleId) { function populateTangle(tangleId) {
const map = new Map() const map = new Map()
for (const rec of records()) { for (const rec of records()) {
@ -212,7 +190,7 @@ exports.init = function initDB(peer, config) {
function populateTangles(tangleIds) { function populateTangles(tangleIds) {
const tangles = {} const tangles = {}
for (const tangleId of tangleIds) { for (const tangleId of tangleIds) {
tangles[tangleId] = populateTangle(tangleId) tangles[tangleId] ??= populateTangle(tangleId)
} }
return tangles return tangles
} }
@ -230,23 +208,22 @@ exports.init = function initDB(peer, config) {
if (!opts.content) return cb(new Error('create() requires a `content`')) if (!opts.content) return cb(new Error('create() requires a `content`'))
if (!opts.type) return cb(new Error('create() requires a `type`')) if (!opts.type) return cb(new Error('create() requires a `type`'))
// Create full opts: const feedRootHash = getFeedRoot(FeedV1.stripAuthor(keys.id), opts.type)
const tangles = populateTangles(opts.tangles ?? []) if (!feedRootHash) {
let tempMsg const feedRoot = FeedV1.createRoot(keys, opts.type)
try { add(feedRoot, FeedV1.getMsgHash(feedRoot), (err) => {
tempMsg = FeedV1.create({ // prettier-ignore
when: Date.now(), if (err) return cb(new Error('create() failed to create root', {cause: err}));
...opts, create(opts, cb)
tangles,
existing: [],
keys,
}) })
} catch (err) { return
return cb(new Error('create() failed', { cause: err }))
} }
const feedId = FeedV1.getFeedId(tempMsg)
const existing = msgsPerFeed.getAll(feedId) // Fill-in tangle opts:
const fullOpts = { when: Date.now(), ...opts, tangles, existing, keys } const tangleTemplates = opts.tangles ?? []
tangleTemplates.push(feedRootHash)
const tangles = populateTangles(tangleTemplates)
const fullOpts = { when: Date.now(), ...opts, tangles, keys }
// If opts ask for encryption, encrypt and put ciphertext in opts.content // If opts ask for encryption, encrypt and put ciphertext in opts.content
const recps = fullOpts.content.recps const recps = fullOpts.content.recps
@ -276,7 +253,6 @@ exports.init = function initDB(peer, config) {
return cb(new Error('create() failed', { cause: err })) return cb(new Error('create() failed', { cause: err }))
} }
const msgHash = FeedV1.getMsgHash(msg) const msgHash = FeedV1.getMsgHash(msg)
msgsPerFeed.update(msg, msgHash)
// Encode the native message and append it to the log: // Encode the native message and append it to the log:
logAppend(msgHash, msg, (err, rec) => { logAppend(msgHash, msg, (err, rec) => {
@ -289,7 +265,6 @@ exports.init = function initDB(peer, config) {
function del(msgId, cb) { function del(msgId, cb) {
const rec = getRecord(msgId) const rec = getRecord(msgId)
msgsPerFeed.deleteMsg(rec.msg)
const { offset, size, seq } = rec.misc const { offset, size, seq } = rec.misc
recs[rec.misc.seq] = { misc: { offset, size, seq } } recs[rec.misc.seq] = { misc: { offset, size, seq } }
log.onDrain(() => { log.onDrain(() => {
@ -317,7 +292,7 @@ exports.init = function initDB(peer, config) {
const rec = recs[i] const rec = recs[i]
if (!rec) continue if (!rec) continue
if (isUri && msgId.endsWith(rec.hash)) return rec if (isUri && msgId.endsWith(rec.hash)) return rec
else if (!isUri && rec.hash.endsWith(msgId)) return rec else if (!isUri && rec.hash === msgId) return rec
} }
return null return null
} }
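At the db level, a hedged sketch of what the reworked create() now means for callers, mirroring the create() and onRecordAdded tests below; peer, keys, and the promisified p() are assumed to come from the tests' setup, and the awaits are assumed to run inside an async function:

await peer.db.loaded()

// The first create() for a given author and type transparently creates and
// appends the feed root before the actual msg, so onRecordAdded fires twice.
const rec1 = await p(peer.db.create)({
  type: 'post',
  content: { text: 'I am 1st post' },
})

// depth and prev now live under the feed-root tangle instead of at the top
// level of metadata.
const [rootHash] = Object.keys(rec1.msg.metadata.tangles)
console.log(rec1.msg.metadata.tangles[rootHash]) // => { depth: 1, prev: [<rootHash>] }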

View File

@ -18,4 +18,12 @@ class ReadyGate {
} }
} }
module.exports = { ReadyGate } function isEmptyObject(obj) {
for (const _key in obj) {
return false
}
return true
}
module.exports = { ReadyGate, isEmptyObject }

View File

@ -20,15 +20,23 @@ test('add()', async (t) => {
await peer.db.loaded() await peer.db.loaded()
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const recRoot = await p(peer.db.add)(rootMsg, rootHash)
t.equals(recRoot.msg.metadata.when, 0, 'root msg added')
const inputMsg = FeedV1.create({ const inputMsg = FeedV1.create({
keys, keys,
when: 1514517067954, when: 1514517067954,
type: 'post', type: 'post',
content: { text: 'This is the first post!' }, content: { text: 'This is the first post!' },
existing: [], tangles: {
[rootHash]: new Map([[FeedV1.getMsgHash(rootMsg), rootMsg]]),
},
}) })
const rec = await p(peer.db.add)(inputMsg) const rec = await p(peer.db.add)(inputMsg, rootHash)
t.equal(rec.msg.content.text, 'This is the first post!') t.equal(rec.msg.content.text, 'This is the first post!')
await p(peer.close)(true) await p(peer.close)(true)

View File

@ -23,6 +23,8 @@ test('setup', async (t) => {
await peer.db.loaded() await peer.db.loaded()
}) })
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
let msgHash1 let msgHash1
let rec1 let rec1
let msgHash2 let msgHash2
@ -32,6 +34,17 @@ test('create()', async (t) => {
content: { text: 'I am 1st post' }, content: { text: 'I am 1st post' },
}) })
t.equal(rec1.msg.content.text, 'I am 1st post', 'msg1 text correct') t.equal(rec1.msg.content.text, 'I am 1st post', 'msg1 text correct')
t.equal(
rec1.msg.metadata.tangles[rootHash].depth,
1,
'msg1 tangle depth correct'
)
t.deepEquals(
rec1.msg.metadata.tangles[rootHash].prev,
[rootHash],
'msg1 tangle prev correct'
)
msgHash1 = FeedV1.getMsgHash(rec1.msg) msgHash1 = FeedV1.getMsgHash(rec1.msg)
const rec2 = await p(peer.db.create)({ const rec2 = await p(peer.db.create)({
@ -39,7 +52,16 @@ test('create()', async (t) => {
content: { text: 'I am 2nd post' }, content: { text: 'I am 2nd post' },
}) })
t.equal(rec2.msg.content.text, 'I am 2nd post', 'msg2 text correct') t.equal(rec2.msg.content.text, 'I am 2nd post', 'msg2 text correct')
t.deepEquals(rec2.msg.metadata.prev, [msgHash1], 'msg2 prev correct') t.equal(
rec2.msg.metadata.tangles[rootHash].depth,
2,
'msg2 tangle depth correct'
)
t.deepEquals(
rec2.msg.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2 tangle prev correct'
)
msgHash2 = FeedV1.getMsgHash(rec2.msg) msgHash2 = FeedV1.getMsgHash(rec2.msg)
}) })
@ -49,10 +71,15 @@ test('add() forked then create() merged', async (t) => {
when: Date.now(), when: Date.now(),
type: 'post', type: 'post',
content: { text: '3rd post forked from 1st' }, content: { text: '3rd post forked from 1st' },
existing: [rec1.msg], tangles: {
[rootHash]: new Map([
[rootHash, rootMsg],
[rec1.hash, rec1.msg],
]),
},
}) })
const rec3 = await p(peer.db.add)(msg3) const rec3 = await p(peer.db.add)(msg3, rootHash)
const msgHash3 = FeedV1.getMsgHash(rec3.msg) const msgHash3 = FeedV1.getMsgHash(rec3.msg)
const rec4 = await p(peer.db.create)({ const rec4 = await p(peer.db.create)({
@ -61,9 +88,9 @@ test('add() forked then create() merged', async (t) => {
}) })
t.ok(rec4, '4th post created') t.ok(rec4, '4th post created')
t.deepEquals( t.deepEquals(
rec4.msg.metadata.prev, rec4.msg.metadata.tangles[rootHash].prev,
[msgHash2, msgHash3], [rootHash, msgHash2, msgHash3],
'msg4 prev is msg2 and msg3' 'msg4 prev is root, msg2 and msg3'
) )
}) })

View File

@ -31,7 +31,7 @@ test('del', async (t) => {
const before = [] const before = []
for (const msg of peer.db.msgs()) { for (const msg of peer.db.msgs()) {
before.push(msg.content.text) if (msg.content) before.push(msg.content.text)
} }
t.deepEqual(before, ['m0', 'm1', 'm2', 'm3', 'm4'], 'msgs before the delete') t.deepEqual(before, ['m0', 'm1', 'm2', 'm3', 'm4'], 'msgs before the delete')
@ -40,7 +40,7 @@ test('del', async (t) => {
const after = [] const after = []
for (const msg of peer.db.msgs()) { for (const msg of peer.db.msgs()) {
after.push(msg.content.text) if (msg.content) after.push(msg.content.text)
} }
t.deepEqual(after, ['m0', 'm1', 'm3', 'm4'], 'msgs after the delete') t.deepEqual(after, ['m0', 'm1', 'm3', 'm4'], 'msgs after the delete')
@ -78,7 +78,7 @@ test('del', async (t) => {
}) })
t.deepEqual( t.deepEqual(
persistedMsgs.map((msg) => msg.content.text), persistedMsgs.filter((msg) => msg.content).map((msg) => msg.content.text),
['m0', 'm1', 'm3', 'm4'], ['m0', 'm1', 'm3', 'm4'],
'msgs in disk after the delete' 'msgs in disk after the delete'
) )

View File

@ -2,6 +2,24 @@ const tape = require('tape')
const FeedV1 = require('../lib/feed-v1') const FeedV1 = require('../lib/feed-v1')
const { generateKeypair } = require('./util') const { generateKeypair } = require('./util')
let rootMsg = null
let rootHash = null
tape('FeedV1.createRoot()', (t) => {
const keys = generateKeypair('alice')
rootMsg = FeedV1.createRoot(keys, 'post')
t.equals(rootMsg.content, null, 'content')
t.equals(rootMsg.metadata.proof, '', 'proof')
t.equals(rootMsg.metadata.size, 0, 'size')
t.equals(rootMsg.metadata.type, 'post', 'type')
t.equals(rootMsg.metadata.who, FeedV1.stripAuthor(keys.id), 'who')
t.equals(rootMsg.metadata.when, 0, 'when')
t.deepEquals(rootMsg.metadata.tangles, {}, 'tangles')
rootHash = FeedV1.getMsgHash(rootMsg)
t.equals(rootHash, 'PpkBfa8C4sB8wHrqiNmHqe', 'root hash')
t.end()
})
tape('FeedV1.create()', (t) => { tape('FeedV1.create()', (t) => {
const keys = generateKeypair('alice') const keys = generateKeypair('alice')
const content = { text: 'Hello world!' } const content = { text: 'Hello world!' }
@ -11,12 +29,14 @@ tape('FeedV1.create()', (t) => {
keys, keys,
content, content,
type: 'post', type: 'post',
existing: [], tangles: {
[rootHash]: new Map([[rootHash, rootMsg]]),
},
when, when,
}) })
t.deepEquals( t.deepEquals(
Object.keys(msg1.metadata), Object.keys(msg1.metadata),
['depth', 'prev', 'proof', 'size', 'type', 'who', 'when'], ['proof', 'size', 'tangles', 'type', 'who', 'when'],
'metadata fields' 'metadata fields'
) )
t.equals( t.equals(
@ -25,16 +45,17 @@ tape('FeedV1.create()', (t) => {
'metadata.who' 'metadata.who'
) )
t.equals(msg1.metadata.type, 'post', 'metadata.type') t.equals(msg1.metadata.type, 'post', 'metadata.type')
t.equals(msg1.metadata.depth, 0, 'metadata.depth')
t.deepEquals(msg1.metadata.prev, [], 'metadata.prev')
t.deepEquals(msg1.metadata.proof, '9R7XmBhHF5ooPg34j9TQcz', 'metadata.proof') t.deepEquals(msg1.metadata.proof, '9R7XmBhHF5ooPg34j9TQcz', 'metadata.proof')
t.deepEquals(Object.keys(msg1.metadata.tangles), [rootHash], 'tangles')
t.equals(msg1.metadata.tangles[rootHash].depth, 1, 'tangle depth')
t.deepEquals(msg1.metadata.tangles[rootHash].prev, [rootHash], 'tangle prev')
t.deepEquals(msg1.metadata.size, 23, 'metadata.size') t.deepEquals(msg1.metadata.size, 23, 'metadata.size')
t.equals(typeof msg1.metadata.when, 'number', 'metadata.when') t.equals(typeof msg1.metadata.when, 'number', 'metadata.when')
t.deepEqual(msg1.content, content, 'content is correct') t.deepEqual(msg1.content, content, 'content is correct')
console.log(msg1) console.log(msg1)
const msgHash1 = '9cYegpVpddoMSdvSf53dTH' const msgHash1 = 'YWbEeMtcU4eNwF6uJVTrKE'
t.equals( t.equals(
FeedV1.getMsgId(msg1), FeedV1.getMsgId(msg1),
@ -49,12 +70,17 @@ tape('FeedV1.create()', (t) => {
keys, keys,
content: content2, content: content2,
type: 'post', type: 'post',
existing: new Map([[msgHash1, msg1]]), tangles: {
[rootHash]: new Map([
[rootHash, rootMsg],
[msgHash1, msg1],
]),
},
when: when + 1, when: when + 1,
}) })
t.deepEquals( t.deepEquals(
Object.keys(msg2.metadata), Object.keys(msg2.metadata),
['depth', 'prev', 'proof', 'size', 'type', 'who', 'when'], ['proof', 'size', 'tangles', 'type', 'who', 'when'],
'metadata keys' 'metadata keys'
) )
t.equals( t.equals(
@ -63,8 +89,9 @@ tape('FeedV1.create()', (t) => {
'metadata.who' 'metadata.who'
) )
t.equals(msg2.metadata.type, 'post', 'metadata.type') t.equals(msg2.metadata.type, 'post', 'metadata.type')
t.equals(msg2.metadata.depth, 1, 'metadata.depth') t.deepEquals(Object.keys(msg1.metadata.tangles), [rootHash], 'tangles')
t.deepEquals(msg2.metadata.prev, [msgHash1], 'metadata.prev') t.equals(msg2.metadata.tangles[rootHash].depth, 2, 'tangle depth')
t.deepEquals(msg2.metadata.tangles[rootHash].prev, [msgHash1], 'tangle prev')
t.deepEquals(msg2.metadata.proof, 'XuZEzH1Dhy1yuRMcviBBcN', 'metadata.proof') t.deepEquals(msg2.metadata.proof, 'XuZEzH1Dhy1yuRMcviBBcN', 'metadata.proof')
t.deepEquals(msg2.metadata.size, 21, 'metadata.size') t.deepEquals(msg2.metadata.size, 21, 'metadata.size')
t.equals(typeof msg2.metadata.when, 'number', 'metadata.when') t.equals(typeof msg2.metadata.when, 'number', 'metadata.when')
@ -74,7 +101,7 @@ tape('FeedV1.create()', (t) => {
t.deepEqual( t.deepEqual(
FeedV1.getMsgId(msg2), FeedV1.getMsgId(msg2),
'ppppp:message/v1/4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW/post/LEH1JVENvJgSpBBrVUwJx6', 'ppppp:message/v1/4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW/post/R9XRXBL1ntSKRrrk86bhn8',
'getMsgId' 'getMsgId'
) )
@ -84,17 +111,23 @@ tape('FeedV1.create()', (t) => {
tape('create() handles DAG tips correctly', (t) => { tape('create() handles DAG tips correctly', (t) => {
const keys = generateKeypair('alice') const keys = generateKeypair('alice')
const when = 1652037377204 const when = 1652037377204
const existing = new Map() const existing = new Map([[rootHash, rootMsg]])
const msg1 = FeedV1.create({ const msg1 = FeedV1.create({
keys, keys,
content: { text: '1' }, content: { text: '1' },
type: 'post', type: 'post',
existing: new Map(), tangles: {
[rootHash]: existing,
},
when: when + 1, when: when + 1,
}) })
const msgHash1 = FeedV1.getMsgHash(msg1) const msgHash1 = FeedV1.getMsgHash(msg1)
t.deepEquals(msg1.metadata.prev, [], 'msg1.prev is empty') t.deepEquals(
msg1.metadata.tangles[rootHash].prev,
['PpkBfa8C4sB8wHrqiNmHqe'],
'msg1.prev is root'
)
existing.set(msgHash1, msg1) existing.set(msgHash1, msg1)
@ -102,20 +135,33 @@ tape('create() handles DAG tips correctly', (t) => {
keys, keys,
content: { text: '2A' }, content: { text: '2A' },
type: 'post', type: 'post',
existing, tangles: {
[rootHash]: existing,
},
when: when + 2, when: when + 2,
}) })
t.deepEquals(msg2A.metadata.prev, [msgHash1], 'msg2A.prev is msg1') t.deepEquals(
msg2A.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2A.prev is msg1'
)
const msg2B = FeedV1.create({ const msg2B = FeedV1.create({
keys, keys,
content: { text: '2B' }, content: { text: '2B' },
type: 'post', type: 'post',
tangles: {
[rootHash]: existing,
},
existing, existing,
when: when + 2, when: when + 2,
}) })
const msgHash2B = FeedV1.getMsgHash(msg2B) const msgHash2B = FeedV1.getMsgHash(msg2B)
t.deepEquals(msg2B.metadata.prev, [msgHash1], 'msg2B.prev is msg1') t.deepEquals(
msg2B.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2B.prev is msg1'
)
existing.set(msgHash2B, msg2B) existing.set(msgHash2B, msg2B)
@ -123,11 +169,17 @@ tape('create() handles DAG tips correctly', (t) => {
keys, keys,
content: { text: '3' }, content: { text: '3' },
type: 'post', type: 'post',
existing, tangles: {
[rootHash]: existing,
},
when: when + 3, when: when + 3,
}) })
const msgHash3 = FeedV1.getMsgHash(msg3) const msgHash3 = FeedV1.getMsgHash(msg3)
t.deepEquals(msg3.metadata.prev, [msgHash2B], 'msg3.prev is msg2B') t.deepEquals(
msg3.metadata.tangles[rootHash].prev,
[rootHash, msgHash2B],
'msg3.prev is root(lipmaa),msg2B(previous)'
)
existing.set(msgHash3, msg3) existing.set(msgHash3, msg3)
const msgHash2A = FeedV1.getMsgHash(msg2A) const msgHash2A = FeedV1.getMsgHash(msg2A)
@ -138,13 +190,15 @@ tape('create() handles DAG tips correctly', (t) => {
keys, keys,
content: { text: '4' }, content: { text: '4' },
type: 'post', type: 'post',
existing, tangles: {
[rootHash]: existing,
},
when: when + 4, when: when + 4,
}) })
t.deepEquals( t.deepEquals(
msg4.metadata.prev, msg4.metadata.tangles[rootHash].prev,
[msgHash1, msgHash3, msgHash2A], [msgHash3, msgHash2A],
'msg4.prev is [msg1(lipmaa),msg3(previous),msg2A(old fork as tip)]' 'msg4.prev is [msg3(previous),msg2A(old fork as tip)]'
) )
t.end() t.end()

View File

@ -3,84 +3,67 @@ const base58 = require('bs58')
const FeedV1 = require('../lib/feed-v1') const FeedV1 = require('../lib/feed-v1')
const { generateKeypair } = require('./util') const { generateKeypair } = require('./util')
tape('invalid 1st msg with non-array prev', (t) => { tape('invalid msg with non-array prev', (t) => {
const keys = generateKeypair('alice') const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const existing = new Map([[rootHash, rootMsg]])
const msg = FeedV1.create({ const msg = FeedV1.create({
keys, keys,
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
type: 'post', type: 'post',
existing: new Map(), tangles: {
[rootHash]: existing,
},
when: 1652030001000, when: 1652030001000,
}) })
msg.metadata.prev = null msg.metadata.tangles[rootHash].prev = null
const msgHash = FeedV1.getMsgHash(msg)
FeedV1.validate(msg, new Map(), (err) => { FeedV1.validate(msg, existing, msgHash, rootHash, (err) => {
t.ok(err, 'invalid 2nd msg throws') t.ok(err, 'invalid 2nd msg throws')
t.match(err.message, /prev must be an array/, 'invalid 2nd msg description') t.match(err.message, /prev must be an array/, 'invalid 2nd msg description')
t.end() t.end()
}) })
}) })
tape('invalid msg with non-array prev', (t) => {
const keys = generateKeypair('alice')
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
when: 1652030002000,
})
msg2.metadata.prev = null
const existing = new Map()
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
t.ok(err, 'invalid 2nd msg throws')
t.match(
err.message,
/prev must be an array/,
'invalid 2nd msg description'
)
t.end()
})
})
tape('invalid msg with bad prev', (t) => { tape('invalid msg with bad prev', (t) => {
const keys = generateKeypair('alice') const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const existing = new Map([[rootHash, rootMsg]])
const msg1 = FeedV1.create({ const msg1 = FeedV1.create({
keys, keys,
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
type: 'post', type: 'post',
existing: new Map(), tangles: {
[rootHash]: existing,
},
when: 1652030001000, when: 1652030001000,
}) })
const msgHash1 = FeedV1.getMsgHash(msg1) const msgHash1 = FeedV1.getMsgHash(msg1)
existing.set(msgHash1, msg1)
const msg2 = FeedV1.create({ const msg2 = FeedV1.create({
keys, keys,
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
type: 'post', type: 'post',
existing: new Map(), tangles: {
[rootHash]: existing,
},
when: 1652030002000, when: 1652030002000,
}) })
msg2.metadata.depth = 1 msg2.metadata.tangles[rootHash].depth = 1
msg2.metadata.prev = [1234] msg2.metadata.tangles[rootHash].prev = [1234]
const msgHash2 = FeedV1.getMsgHash(msg2)
const existing = new Map() FeedV1.validate(msg2, existing, msgHash2, rootHash, (err) => {
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
t.ok(err, 'invalid 2nd msg throws') t.ok(err, 'invalid 2nd msg throws')
t.match( t.match(
err.message, err.message,
@ -94,30 +77,39 @@ tape('invalid msg with bad prev', (t) => {
tape('invalid msg with URI in prev', (t) => { tape('invalid msg with URI in prev', (t) => {
const keys = generateKeypair('alice') const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const existing = new Map([[rootHash, rootMsg]])
const msg1 = FeedV1.create({ const msg1 = FeedV1.create({
keys, keys,
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
type: 'post', type: 'post',
existing: new Map(), tangles: {
[rootHash]: existing,
},
when: 1652030001000, when: 1652030001000,
}) })
const msgHash1 = FeedV1.getMsgHash(msg1) const msgHash1 = FeedV1.getMsgHash(msg1)
existing.set(msgHash1, msg1)
const msg2 = FeedV1.create({ const msg2 = FeedV1.create({
keys, keys,
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
type: 'post', type: 'post',
existing: new Map(), tangles: {
[rootHash]: existing,
},
when: 1652030002000, when: 1652030002000,
}) })
const msgHash2 = FeedV1.getMsgHash(msg2)
const randBuf = Buffer.alloc(16).fill(16) const randBuf = Buffer.alloc(16).fill(16)
const fakeMsgKey1 = `ppppp:message/v1/${base58.encode(randBuf)}` const fakeMsgKey1 = `ppppp:message/v1/${base58.encode(randBuf)}`
msg2.metadata.depth = 1 msg2.metadata.tangles[rootHash].depth = 1
msg2.metadata.prev = [fakeMsgKey1] msg2.metadata.tangles[rootHash].prev = [fakeMsgKey1]
const existing = new Map() FeedV1.validate(msg2, existing, msgHash2, rootHash, (err) => {
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
t.ok(err, 'invalid 2nd msg throws') t.ok(err, 'invalid 2nd msg throws')
t.match( t.match(
err.message, err.message,
@ -131,20 +123,30 @@ tape('invalid msg with URI in prev', (t) => {
tape('invalid msg with unknown prev', (t) => { tape('invalid msg with unknown prev', (t) => {
const keys = generateKeypair('alice') const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const existing = new Map([[rootHash, rootMsg]])
const msg1 = FeedV1.create({ const msg1 = FeedV1.create({
keys, keys,
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
type: 'post', type: 'post',
existing: new Map(), tangles: {
[rootHash]: existing,
},
when: 1652030001000, when: 1652030001000,
}) })
const msgHash1 = FeedV1.getMsgHash(msg1) const msgHash1 = FeedV1.getMsgHash(msg1)
existing.set(msgHash1, msg1)
const unknownMsg = FeedV1.create({ const unknownMsg = FeedV1.create({
keys, keys,
content: { text: 'Alien' }, content: { text: 'Alien' },
type: 'post', type: 'post',
existing: new Map(), tangles: {
[rootHash]: existing,
},
when: 1652030001000, when: 1652030001000,
}) })
const unknownMsgHash = FeedV1.getMsgHash(unknownMsg) const unknownMsgHash = FeedV1.getMsgHash(unknownMsg)
@ -153,13 +155,14 @@ tape('invalid msg with unknown prev', (t) => {
keys, keys,
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
type: 'post', type: 'post',
existing: new Map([[unknownMsgHash, unknownMsg]]), tangles: {
[rootHash]: new Map([[rootHash, rootMsg], [unknownMsgHash, unknownMsg]]),
},
when: 1652030002000, when: 1652030002000,
}) })
const msgHash2 = FeedV1.getMsgHash(msg2)
const existing = new Map() FeedV1.validate(msg2, existing, msgHash2, rootHash, (err) => {
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
t.ok(err, 'invalid 2nd msg throws') t.ok(err, 'invalid 2nd msg throws')
t.match( t.match(
err.message, err.message,

View File

@ -12,7 +12,6 @@ tape('invalid type not a string', (t) => {
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
when: 1652037377204, when: 1652037377204,
type: 123, type: 123,
existing: new Map(),
}) })
}, },
/type is not a string/, /type is not a string/,
@ -31,7 +30,6 @@ tape('invalid type with "/" character', (t) => {
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
when: 1652037377204, when: 1652037377204,
type: 'group/init', type: 'group/init',
existing: new Map(),
}) })
}, },
/invalid type/, /invalid type/,
@ -50,7 +48,6 @@ tape('invalid type with "*" character', (t) => {
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
when: 1652037377204, when: 1652037377204,
type: 'star*', type: 'star*',
existing: new Map(),
}) })
}, },
/invalid type/, /invalid type/,
@ -69,7 +66,6 @@ tape('invalid type too short', (t) => {
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
when: 1652037377204, when: 1652037377204,
type: 'xy', type: 'xy',
existing: new Map(),
}) })
}, },
/shorter than 3/, /shorter than 3/,
@ -88,7 +84,6 @@ tape('invalid type too long', (t) => {
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
when: 1652037377204, when: 1652037377204,
type: 'a'.repeat(120), type: 'a'.repeat(120),
existing: new Map(),
}) })
}, },
/100\+ characters long/, /100\+ characters long/,

View File

@ -8,64 +8,132 @@ tape('lipmaa prevs', (t) => {
const when = 1652037377204 const when = 1652037377204
const existing = new Map() const existing = new Map()
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
existing.set(rootHash, rootMsg)
const msg1 = FeedV1.create({ const msg1 = FeedV1.create({
keys, keys,
content, content,
type: 'post', type: 'post',
existing: new Map(), tangles: {
[rootHash]: existing,
},
when: when + 1, when: when + 1,
}) })
const msgHash1 = FeedV1.getMsgHash(msg1) const msgHash1 = FeedV1.getMsgHash(msg1)
existing.set(msgHash1, msg1) existing.set(msgHash1, msg1)
t.deepEquals(msg1.metadata.prev, [], 'msg1.prev is empty') t.equals(msg1.metadata.tangles[rootHash].depth, 1, 'msg1 depth')
t.deepEquals(msg1.metadata.tangles[rootHash].prev, [rootHash], 'msg1 prev')
const msg2 = FeedV1.create({ const msg2 = FeedV1.create({
keys, keys,
content, content,
type: 'post', type: 'post',
existing, tangles: {
[rootHash]: existing,
},
when: when + 2, when: when + 2,
}) })
const msgHash2 = FeedV1.getMsgHash(msg2) const msgHash2 = FeedV1.getMsgHash(msg2)
existing.set(msgHash2, msg2) existing.set(msgHash2, msg2)
t.deepEquals(msg2.metadata.prev, [msgHash1], 'msg2.prev is msg1') t.equals(msg2.metadata.tangles[rootHash].depth, 2, 'msg2 depth')
t.deepEquals(
msg2.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2 prev'
)
const msg3 = FeedV1.create({ const msg3 = FeedV1.create({
keys, keys,
content, content,
type: 'post', type: 'post',
existing, tangles: {
[rootHash]: existing,
},
when: when + 3, when: when + 3,
}) })
const msgHash3 = FeedV1.getMsgHash(msg3) const msgHash3 = FeedV1.getMsgHash(msg3)
existing.set(msgHash3, msg3) existing.set(msgHash3, msg3)
t.deepEquals(msg3.metadata.prev, [msgHash2], 'msg3.prev is msg2') t.equals(msg3.metadata.tangles[rootHash].depth, 3, 'msg3 depth')
t.deepEquals(
msg3.metadata.tangles[rootHash].prev,
[rootHash, msgHash2],
'msg3 prev (has lipmaa!)'
)
const msg4 = FeedV1.create({ const msg4 = FeedV1.create({
keys, keys,
content, content,
type: 'post', type: 'post',
existing, tangles: {
[rootHash]: existing,
},
when: when + 4, when: when + 4,
}) })
const msgHash4 = FeedV1.getMsgHash(msg4) const msgHash4 = FeedV1.getMsgHash(msg4)
existing.set(msgHash4, msg4) existing.set(msgHash4, msg4)
t.equals(msg4.metadata.tangles[rootHash].depth, 4, 'msg4 depth')
t.deepEquals( t.deepEquals(
msg4.metadata.prev, msg4.metadata.tangles[rootHash].prev,
[msgHash1, msgHash3], [msgHash3],
'msg4.prev is msg1 and msg3' 'msg4 prev'
) )
const msg5 = FeedV1.create({ const msg5 = FeedV1.create({
keys, keys,
content, content,
type: 'post', type: 'post',
existing, tangles: {
[rootHash]: existing,
},
when: when + 5, when: when + 5,
}) })
const msgHash5 = FeedV1.getMsgHash(msg5) const msgHash5 = FeedV1.getMsgHash(msg5)
existing.set(msgHash5, msg5) existing.set(msgHash5, msg5)
t.deepEquals(msg5.metadata.prev, [msgHash4], 'msg5.prev is msg4') t.equals(msg5.metadata.tangles[rootHash].depth, 5, 'msg5 depth')
t.deepEquals(
msg5.metadata.tangles[rootHash].prev,
[msgHash4],
'msg5 prev'
)
const msg6 = FeedV1.create({
keys,
content,
type: 'post',
tangles: {
[rootHash]: existing,
},
when: when + 6,
})
const msgHash6 = FeedV1.getMsgHash(msg6)
existing.set(msgHash6, msg6)
t.equals(msg6.metadata.tangles[rootHash].depth, 6, 'msg6 depth')
t.deepEquals(
msg6.metadata.tangles[rootHash].prev,
[msgHash5],
'msg6 prev'
)
const msg7 = FeedV1.create({
keys,
content,
type: 'post',
tangles: {
[rootHash]: existing,
},
when: when + 7,
})
const msgHash7 = FeedV1.getMsgHash(msg7)
existing.set(msgHash7, msg7)
t.equals(msg7.metadata.tangles[rootHash].depth, 7, 'msg7 depth')
t.deepEquals(
msg7.metadata.tangles[rootHash].prev,
[msgHash3, msgHash6],
'msg7 prev (has lipmaa!)'
)
t.end() t.end()
}) })

View File

@ -5,16 +5,32 @@ const { generateKeypair } = require('./util')
tape('simple multi-author tangle', (t) => { tape('simple multi-author tangle', (t) => {
const keysA = generateKeypair('alice') const keysA = generateKeypair('alice')
const keysB = generateKeypair('bob') const keysB = generateKeypair('bob')
const existingA = new Map()
const existingB = new Map()
const rootMsgA = FeedV1.createRoot(keysA, 'post')
const rootHashA = FeedV1.getMsgHash(rootMsgA)
existingA.set(rootHashA, rootMsgA)
const rootMsgB = FeedV1.createRoot(keysB, 'post')
const rootHashB = FeedV1.getMsgHash(rootMsgB)
existingB.set(rootHashB, rootMsgB)
const msg1 = FeedV1.create({ const msg1 = FeedV1.create({
keys: keysA, keys: keysA,
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
type: 'post', type: 'post',
existing: new Map(), tangles: {
[rootHashA]: existingA,
},
when: 1652030001000, when: 1652030001000,
}) })
const msgHash1 = FeedV1.getMsgHash(msg1) const msgHash1 = FeedV1.getMsgHash(msg1)
t.notOk(msg1.metadata.tangles, 'msg1 has no extra tangles') t.deepEquals(
Object.keys(msg1.metadata.tangles),
[rootHashA],
'msg1 has only feed tangle'
)
const msg2 = FeedV1.create({ const msg2 = FeedV1.create({
keys: keysB, keys: keysB,
@ -22,12 +38,24 @@ tape('simple multi-author tangle', (t) => {
type: 'post', type: 'post',
existing: new Map(), existing: new Map(),
tangles: { tangles: {
[rootHashB]: existingB,
[msgHash1]: new Map([[msgHash1, msg1]]), [msgHash1]: new Map([[msgHash1, msg1]]),
}, },
when: 1652030002000, when: 1652030002000,
}) })
t.ok(msg2.metadata.tangles, 'msg2 has extra tangles')
t.ok(msg2.metadata.tangles[msgHash1], 'msg2 has tangle for msgHash1') t.deepEquals(
Object.keys(msg2.metadata.tangles),
[rootHashB, msgHash1],
'msg2 has feed tangle and misc tangle'
)
t.equal(msg2.metadata.tangles[rootHashB].depth, 1, 'msg2 feed tangle depth')
t.deepEquals(
msg2.metadata.tangles[rootHashB].prev,
[rootHashB],
'msg2 feed tangle prev'
)
t.equal(msg2.metadata.tangles[msgHash1].depth, 1, 'msg2 has tangle depth 1') t.equal(msg2.metadata.tangles[msgHash1].depth, 1, 'msg2 has tangle depth 1')
t.deepEquals( t.deepEquals(
msg2.metadata.tangles[msgHash1].prev, msg2.metadata.tangles[msgHash1].prev,
@ -48,18 +76,29 @@ tape('lipmaa in multi-author tangle', (t) => {
const existingB = new Map() const existingB = new Map()
const tangleExisting = new Map() const tangleExisting = new Map()
const rootMsgA = FeedV1.createRoot(keysA, 'post')
const rootHashA = FeedV1.getMsgHash(rootMsgA)
existingA.set(rootHashA, rootMsgA)
const rootMsgB = FeedV1.createRoot(keysB, 'post')
const rootHashB = FeedV1.getMsgHash(rootMsgB)
existingB.set(rootHashB, rootMsgB)
const msg1 = FeedV1.create({ const msg1 = FeedV1.create({
keys: keysA, keys: keysA,
content, content,
type: 'post', type: 'post',
existing: existingA, tangles: {
[rootHashA]: existingA,
},
when: when + 1, when: when + 1,
}) })
const msgHash1 = FeedV1.getMsgHash(msg1) const msgHash1 = FeedV1.getMsgHash(msg1)
existingA.set(msgHash1, msg1) existingA.set(msgHash1, msg1)
tangleExisting.set(msgHash1, msg1) tangleExisting.set(msgHash1, msg1)
t.notOk(msg1.metadata.tangles, 'A:msg1 has no extra tangles') t.deepEquals(Object.keys(msg1.metadata.tangles),[rootHashA], 'A:msg1 has only feed tangle')
const msg2 = FeedV1.create({ const msg2 = FeedV1.create({
keys: keysB, keys: keysB,
@ -67,6 +106,7 @@ tape('lipmaa in multi-author tangle', (t) => {
type: 'post', type: 'post',
existing: existingB, existing: existingB,
tangles: { tangles: {
[rootHashB]: existingB,
[msgHash1]: tangleExisting, [msgHash1]: tangleExisting,
}, },
when: when + 2, when: when + 2,
@ -87,6 +127,7 @@ tape('lipmaa in multi-author tangle', (t) => {
type: 'post', type: 'post',
existing: existingB, existing: existingB,
tangles: { tangles: {
[rootHashB]: existingB,
[msgHash1]: tangleExisting, [msgHash1]: tangleExisting,
}, },
when: when + 3, when: when + 3,
@ -107,6 +148,7 @@ tape('lipmaa in multi-author tangle', (t) => {
type: 'post', type: 'post',
existing: existingA, existing: existingA,
tangles: { tangles: {
[rootHashA]: existingA,
[msgHash1]: tangleExisting, [msgHash1]: tangleExisting,
}, },
when: when + 4, when: when + 4,

View File

@ -3,106 +3,42 @@ const base58 = require('bs58')
const FeedV1 = require('../lib/feed-v1') const FeedV1 = require('../lib/feed-v1')
const { generateKeypair } = require('./util') const { generateKeypair } = require('./util')
tape('validate 1st msg', (t) => { tape('validate root msg', (t) => {
const keys = generateKeypair('alice') const keys = generateKeypair('alice')
const existing = new Map()
const msg = FeedV1.create({ const rootMsg = FeedV1.createRoot(keys, 'post')
keys, const rootHash = FeedV1.getMsgHash(rootMsg)
content: { text: 'Hello world!' }, existing.set(rootHash, rootMsg)
type: 'post',
existing: new Map(),
when: 1652030001000,
})
FeedV1.validate(msg, [], (err) => { FeedV1.validate(rootMsg, existing, rootHash, rootHash, (err) => {
if (err) console.log(err) if (err) console.log(err)
t.error(err, 'valid 1st msg') t.error(err, 'valid root msg')
t.end() t.end()
}) })
}) })
tape('validate 2nd msg with existing nativeMsg', (t) => { tape('validate 2nd msg with existing root', (t) => {
const keys = generateKeypair('alice') const keys = generateKeypair('alice')
const existing = new Map()
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
existing.set(rootHash, rootMsg)
const msg1 = FeedV1.create({ const msg1 = FeedV1.create({
keys, keys,
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
type: 'post', type: 'post',
existing: new Map(), tangles: {
[rootHash]: existing,
},
when: 1652030001000, when: 1652030001000,
}) })
const msgHash1 = FeedV1.getMsgHash(msg1) const msgHash1 = FeedV1.getMsgHash(msg1)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map([[msgHash1, msg1]]),
when: 1652030002000,
})
const existing = new Map()
existing.set(msgHash1, msg1) existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
if (err) console.log(err)
t.error(err, 'valid 2nd msg')
t.end()
})
})
tape('validate 2nd msg with existing msgId', (t) => { FeedV1.validate(msg1, existing, msgHash1, rootHash, (err) => {
const keys = generateKeypair('alice')
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
prev: [],
existing: new Map(),
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map([[msgHash1, msg1]]),
when: 1652030002000,
})
const existing = new Map()
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
if (err) console.log(err)
t.error(err, 'valid 2nd msg')
t.end()
})
})
tape('validate 2nd msg with existing KVT', (t) => {
const keys = generateKeypair('alice')
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map([[msgHash1, msg1]]),
when: 1652030002000,
})
const existing = new Map()
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
if (err) console.log(err) if (err) console.log(err)
t.error(err, 'valid 2nd msg') t.error(err, 'valid 2nd msg')
t.end() t.end()
@ -112,36 +48,38 @@ tape('validate 2nd msg with existing KVT', (t) => {
tape('validate 2nd forked msg', (t) => { tape('validate 2nd forked msg', (t) => {
const keys = generateKeypair('alice') const keys = generateKeypair('alice')
const msg1 = FeedV1.create({ const existing = new Map()
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
existing.set(rootHash, rootMsg)
const msg1A = FeedV1.create({
keys, keys,
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
type: 'post', type: 'post',
tangles: {
[rootHash]: existing,
},
existing: new Map(), existing: new Map(),
when: 1652030001000, when: 1652030001000,
}) })
const msgHash1 = FeedV1.getMsgHash(msg1) const msgHash1A = FeedV1.getMsgHash(msg1A)
const msg2A = FeedV1.create({ const msg1B = FeedV1.create({
keys, keys,
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
type: 'post', type: 'post',
existing: new Map([[msgHash1, msg1]]), tangles: {
[rootHash]: existing,
},
when: 1652030002000, when: 1652030002000,
}) })
const msgHash2A = FeedV1.getMsgHash(msg2A) const msgHash1B = FeedV1.getMsgHash(msg1B)
const msg2B = FeedV1.create({ existing.set(msgHash1A, msg1A)
keys, existing.set(msgHash1B, msg1B)
content: { text: 'Hello world!' }, FeedV1.validate(msg1B, existing, msgHash1B, rootHash, (err) => {
type: 'post',
existing: new Map([[msgHash1, msg1]]),
when: 1652030003000,
})
const existing = new Map()
existing.set(msgHash1, msg1)
existing.set(msgHash2A, msg2A)
FeedV1.validate(msg2B, existing, (err) => {
if (err) console.log(err) if (err) console.log(err)
t.error(err, 'valid 2nd forked msg') t.error(err, 'valid 2nd forked msg')
t.end() t.end()
@ -151,29 +89,28 @@ tape('validate 2nd forked msg', (t) => {
tape('invalid msg with unknown previous', (t) => { tape('invalid msg with unknown previous', (t) => {
const keys = generateKeypair('alice') const keys = generateKeypair('alice')
const existing = new Map()
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
existing.set(rootHash, rootMsg)
const msg1 = FeedV1.create({ const msg1 = FeedV1.create({
keys, keys,
content: { text: 'Hello world!' }, content: { text: 'Hello world!' },
type: 'post', type: 'post',
existing: new Map(), tangles: {
[rootHash]: existing,
},
when: 1652030001000, when: 1652030001000,
}) })
const msgHash1 = FeedV1.getMsgHash(msg1) const msgHash1 = FeedV1.getMsgHash(msg1)
const fakeMsgKey1 = base58.encode(Buffer.alloc(16).fill(42)) const fakeMsgHash = base58.encode(Buffer.alloc(16).fill(42))
const msg2 = FeedV1.create({ msg1.metadata.tangles[rootHash].prev = [fakeMsgHash]
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map([[msgHash1, msg1]]),
when: 1652030002000,
})
msg2.metadata.prev = [fakeMsgKey1]
const existing = new Map() FeedV1.validate(msg1, existing, msgHash1, rootHash, (err) => {
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
t.ok(err, 'invalid 2nd msg throws') t.ok(err, 'invalid 2nd msg throws')
t.match( t.match(
err.message, err.message,

View File

@ -31,6 +31,7 @@ test('msgs() iterator', async (t) => {
const posts = [] const posts = []
const abouts = [] const abouts = []
for (const msg of peer.db.msgs()) { for (const msg of peer.db.msgs()) {
if (!msg.content) continue
if (msg.metadata.type === 'post') posts.push(msg.content.text) if (msg.metadata.type === 'post') posts.push(msg.content.text)
else if (msg.metadata.type === 'about') abouts.push(msg.content.name) else if (msg.metadata.type === 'about') abouts.push(msg.content.name)
} }

View File

@ -31,8 +31,10 @@ test('onRecordAdded', async (t) => {
await p(setTimeout)(500) await p(setTimeout)(500)
t.equal(listened.length, 1) t.equal(listened.length, 2)
t.deepEquals(listened, [rec1]) t.deepEquals(listened[0].msg.content, null, 'root')
t.deepEquals(listened[0].msg.metadata.when, 0, 'root')
t.deepEquals(listened[1], rec1, 'actual record')
remove() remove()
await p(peer.close)(true) await p(peer.close)(true)

View File

@ -46,6 +46,7 @@ test('create some msgs, close, re-open', async (t) => {
const texts = [] const texts = []
for (const msg of peer2.db.msgs()) { for (const msg of peer2.db.msgs()) {
if (!msg.content) continue
texts.push(msg.content.text) texts.push(msg.content.text)
} }

View File

@ -30,6 +30,7 @@ test('records() iterator', async (t) => {
let count = 0 let count = 0
for (const rec of peer.db.records()) { for (const rec of peer.db.records()) {
if (!rec.msg.content) continue
t.true(rec.misc.size > rec.msg.metadata.size) t.true(rec.misc.size > rec.msg.metadata.size)
count++ count++
} }