make feed depth/prev a regular tangle

Andre Staltz 2023-04-10 22:08:24 +03:00
parent 3041ffc60a
commit 4e334e242c
17 changed files with 528 additions and 420 deletions
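
In short: depth and prev are no longer top-level feed metadata; each feed becomes an ordinary tangle rooted at a deterministic root message, and a message records its position in every tangle it belongs to under metadata.tangles. A minimal sketch of the shape change, pieced together from the typedefs and tests below (hashes, proof, and signature values are placeholders, not real data):

// Before this commit: feed ordering lived directly on metadata
{
  content: { text: 'Hello world!' },
  metadata: {
    depth: 1,
    prev: ['<prevMsgHash>'],
    proof: '<contentHash>', size: 23,
    type: 'post', who: '<authorPubkey>', when: 1652030001000,
  },
  sig: '<signature>',
}

// After this commit: the same data lives under tangles, keyed by the feed root hash
{
  content: { text: 'Hello world!' },
  metadata: {
    proof: '<contentHash>', size: 23,
    tangles: {
      '<feedRootHash>': { depth: 1, prev: ['<feedRootHash>'] },
    },
    type: 'post', who: '<authorPubkey>', when: 1652030001000,
  },
  sig: '<signature>',
}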

View File

@ -12,9 +12,7 @@ const {
validateType,
validateContent,
validate,
validateOOO,
validateBatch,
validateOOOBatch,
validateMsgHash,
} = require('./validation')
@ -23,7 +21,7 @@ const {
*/
/**
* @typedef {Object} TangleData
* @typedef {Object} TangleMetadata
* @property {number} depth
* @property {Array<string>} prev
*/
@ -32,80 +30,73 @@ const {
* @typedef {Object} Msg
* @property {*} content
* @property {Object} metadata
* @property {number} metadata.depth
* @property {Array<string>} metadata.prev
* @property {string} metadata.proof
* @property {number} metadata.size
* @property {Record<string, TangleData>=} metadata.tangles
* @property {string=} metadata.type
* @property {Record<string, TangleMetadata>} metadata.tangles
* @property {string} metadata.type
* @property {string} metadata.who
* @property {number=} metadata.when
* @property {number} metadata.when
* @property {string} sig
*/
/**
* @typedef {Object} Keys
* @property {string} keys.id
* @property {string} keys.private
*/
/**
* @typedef {Object} CreateOpts
* @property {*} content
* @property {string} type
* @property {number} when
* @property {Object} keys
* @property {Keys} keys
* @property {Record<string, MsgIter>} tangles
*/
/**
* @typedef {Object} CreateRootOpts
* @property {string} type
* @property {Keys} keys
* @property {string} keys.id
* @property {string} keys.private
* @property {MsgIter} existing
* @property {Record<string, MsgIter>=} tangles
*/
/**
* @param {Msg} msg
*/
function getFeedId(msg) {
if (msg.metadata.type) {
return `ppppp:feed/v1/${msg.metadata.who}/${msg.metadata.type}`
} else {
return `ppppp:feed/v1/${msg.metadata.who}`
}
}
// function getFeedId(msg) {
// if (msg.metadata.type) {
// return `ppppp:feed/v1/${msg.metadata.who}/${msg.metadata.type}`
// } else {
// return `ppppp:feed/v1/${msg.metadata.who}`
// }
// }
function isMsg(x) {
return (
typeof x === 'object' &&
!!x &&
typeof x.metadata.author === 'string' &&
x.metadata.author &&
typeof x.metadata.type === 'string' &&
x.metadata.type
)
}
// function isMsg(x) {
// return (
// typeof x === 'object' &&
// !!x &&
// typeof x.metadata.author === 'string' &&
// x.metadata.author &&
// typeof x.metadata.type === 'string' &&
// x.metadata.type
// )
// }
function isFeedId(author) {
if (typeof author !== 'string') return false
return author.startsWith('ppppp:feed/v1/')
}
// function isFeedId(author) {
// if (typeof author !== 'string') return false
// return author.startsWith('ppppp:feed/v1/')
// }
function toPlaintextBuffer(opts) {
return Buffer.from(stringify(opts.content), 'utf8')
}
function readDepth(msg, tangleId = null) {
if (tangleId) {
return msg.metadata.tangles?.[tangleId]?.depth ?? 0
} else {
return msg.metadata.depth
}
}
function readPrev(msg, tangleId = null) {
if (tangleId) {
return msg.metadata.tangles?.[tangleId]?.prev ?? []
} else {
return msg.metadata.prev
}
}
function calculateDepth(existing, tangleId = null) {
let max = -1
for (const msg of existing.values()) {
const depth = readDepth(msg, tangleId)
const depth = msg.metadata.tangles[tangleId]?.depth ?? 0
if (depth > max) {
max = depth
}
@ -148,7 +139,7 @@ function determineTips(existing, tangleId = null) {
}
for (const msg of existing.values()) {
const prev = readPrev(msg, tangleId)
const prev = msg.metadata.tangles[tangleId]?.prev ?? []
for (const p of prev) {
tips.delete(p)
}
@ -160,7 +151,7 @@ function calculatePrev(existing, depth, lipmaaDepth, tangleId = null) {
const prev = []
const tips = determineTips(existing, tangleId)
for (const msg of existing.values()) {
const msgDepth = readDepth(msg, tangleId)
const msgDepth = msg.metadata.tangles[tangleId]?.depth ?? 0
const msgHash = getMsgHash(msg)
if (
msgDepth === depth - 1 ||
@ -173,7 +164,12 @@ function calculatePrev(existing, depth, lipmaaDepth, tangleId = null) {
return prev
}
function prevalidateExisting(existing, tangleId = null) {
/**
* @param {MsgIter} existing
* @param {string} tangleId
* @returns
*/
function prevalidateExisting(existing, tangleId) {
if (!existing?.[Symbol.iterator]) {
// prettier-ignore
return new Error(`existing must be an iterator, but got ${typeof existing}`)
@ -182,6 +178,10 @@ function prevalidateExisting(existing, tangleId = null) {
// prettier-ignore
return new Error(`existing must be a Map, Set, or Array, but got ${existing}`)
}
if (!tangleId) {
// prettier-ignore
return new Error(`tangleId must be a string, but got ${typeof tangleId}`)
}
let isEmpty = true
let hasDepthZeroMsg = false
for (const p of existing.values()) {
@ -191,25 +191,16 @@ function prevalidateExisting(existing, tangleId = null) {
return new Error(`existing must contain messages, but got ${typeof p}`)
}
if (!tangleId && p.metadata.depth === 0) {
if (!p.metadata.tangles[tangleId] && getMsgHash(p) === tangleId) {
if (hasDepthZeroMsg) {
// prettier-ignore
return new Error(`existing must contain only 1 message with depth 0`)
} else {
hasDepthZeroMsg = true
}
} else if (tangleId) {
if (!p.metadata.tangles?.[tangleId] && getMsgHash(p) === tangleId) {
if (hasDepthZeroMsg) {
// prettier-ignore
return new Error(`existing must contain only 1 message with depth 0`)
} else {
hasDepthZeroMsg = true
}
} else if (!p.metadata.tangles?.[tangleId]) {
// prettier-ignore
return new Error(`existing must refer to the tangleId ${tangleId}`)
}
} else if (!p.metadata.tangles[tangleId]) {
// prettier-ignore
return new Error(`existing must refer to the tangleId ${tangleId}`)
}
}
if (!isEmpty && !hasDepthZeroMsg) {
@ -225,14 +216,11 @@ function prevalidateExisting(existing, tangleId = null) {
function create(opts) {
let err
if ((err = validateType(opts.type))) throw err
if ((err = prevalidateExisting(opts.existing))) throw err
if (!opts.tangles) throw new Error('opts.tangles is required')
const [proof, size] = representContent(opts.content)
const depth = calculateDepth(opts.existing)
const lipmaaDepth = lipmaa(depth + 1) - 1
const prev = calculatePrev(opts.existing, depth, lipmaaDepth)
let tangles = null
const tangles = {}
if (opts.tangles) {
for (const rootId in opts.tangles) {
if ((err = validateMsgHash(rootId))) throw err
@ -242,19 +230,19 @@ function create(opts) {
const depth = calculateDepth(existing, rootId)
const lipmaaDepth = lipmaa(depth + 1) - 1
const prev = calculatePrev(existing, depth, lipmaaDepth, rootId)
tangles ??= {}
tangles[rootId] = { depth, prev }
}
} else {
// prettier-ignore
throw new Error(`cannot create msg without tangles, that's the case for createRoot()`)
}
const msg = {
content: opts.content,
metadata: {
depth,
prev,
proof,
size,
...(tangles ? { tangles } : null),
tangles,
type: opts.type,
who: stripAuthor(opts.keys.id),
when: +opts.when,
@ -273,6 +261,38 @@ function create(opts) {
return msg
}
/**
* @param {Keys} keys
* @param {string} type
* @returns {Msg}
*/
function createRoot(keys, type) {
let err
if ((err = validateType(type))) throw err
const msg = {
content: null,
metadata: {
proof: '',
size: 0,
tangles: {},
type,
who: stripAuthor(keys.id),
when: 0,
},
sig: '',
}
const privateKey = Buffer.from(keys.private, 'base64')
// TODO: add a label prefix to the metadata before signing
const metadataBuf = Buffer.from(stringify(msg.metadata), 'utf8')
// TODO: when signing, what's the point of a customizable hmac?
const sigBuf = ed25519.sign(privateKey, metadataBuf)
msg.sig = base58.encode(sigBuf)
return msg
}
/**
* @param {Buffer} plaintextBuf
* @param {Msg} msg
@ -285,14 +305,14 @@ function fromPlaintextBuffer(plaintextBuf, msg) {
module.exports = {
getMsgHash,
getMsgId,
getFeedId,
isFeedId,
isMsg,
// getFeedId,
// isFeedId,
// isMsg,
create,
createRoot,
stripAuthor,
toPlaintextBuffer,
fromPlaintextBuffer,
validate,
validateOOO,
validateBatch,
validateOOOBatch,
}
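
In practical terms, feed authors now start from a deterministic root message and thread every later message onto it. A minimal usage sketch of the updated FeedV1 API, mirroring the tests in this commit (generateKeypair is the test helper from test/util; keys is an object with id and private):

const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post') // content: null, when: 0, tangles: {}
const rootHash = FeedV1.getMsgHash(rootMsg)

const existing = new Map([[rootHash, rootMsg]])
const msg1 = FeedV1.create({
  keys,
  type: 'post',
  content: { text: 'Hello world!' },
  tangles: { [rootHash]: existing }, // replaces the old `existing` opt
  when: Date.now(),
})
// msg1.metadata.tangles[rootHash] => { depth: 1, prev: [rootHash] }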

View File

@ -12,11 +12,8 @@ function validateShape(msg) {
if (typeof msg.metadata.who === 'undefined') {
return new Error('invalid message: must have metadata.who')
}
if (typeof msg.metadata.depth === 'undefined') {
return new Error('invalid message: must have metadata.depth')
}
if (typeof msg.metadata.prev === 'undefined') {
return new Error('invalid message: must have metadata.prev')
if (typeof msg.metadata.tangles !== 'object') {
return new Error('invalid message: must have metadata.tangles')
}
if (typeof msg.metadata.proof === 'undefined') {
return new Error('invalid message: must have metadata.proof')
@ -45,7 +42,9 @@ function validateMsgHash(str) {
try {
base58.decode(str)
} catch (err) {
return new Error(`invalid message: msgHash ${str} should have been a base58 string`)
return new Error(
`invalid message: msgHash ${str} should have been a base58 string`
)
}
}
@ -74,12 +73,13 @@ function validateSignature(msg) {
}
}
function validatePrev(msg, existingMsgs) {
if (!msg.metadata.prev || !msg.metadata.prev[Symbol.iterator]) {
function validateTangle(msg, existingMsgs, tangleId) {
const tangle = msg.metadata.tangles[tangleId]
if (!tangle?.prev || !Array.isArray(tangle.prev)) {
// prettier-ignore
return new Error('invalid message: prev must be an iterator, on feed: ' + msg.metadata.who);
return new Error('invalid message: prev must be an array, on feed: ' + msg.metadata.who);
}
for (const p of msg.metadata.prev) {
for (const p of tangle.prev) {
if (typeof p !== 'string') {
// prettier-ignore
return new Error('invalid message: prev must contain strings but found ' + p + ', on feed: ' + msg.metadata.who);
@ -103,21 +103,18 @@ function validatePrev(msg, existingMsgs) {
// prettier-ignore
return new Error('invalid message: prev ' + p + ' is not from the same type, on feed: ' + msg.metadata.who);
}
if (existingMsg.metadata.depth >= msg.metadata.depth) {
const existingDepth = existingMsg.metadata.tangles[tangleId]?.depth ?? 0
if (existingDepth >= tangle.depth) {
// prettier-ignore
return new Error('invalid message: depth of prev ' + p + ' is not lower, on feed: ' + msg.metadata.who);
}
}
}
function validateFirstPrev(msg) {
if (!Array.isArray(msg.metadata.prev)) {
function validateTangleRoot(msg, tangleId) {
if (msg.metadata.tangles[tangleId]) {
// prettier-ignore
return new Error('invalid message: prev must be an array, on feed: ' + msg.metadata.who);
}
if (msg.metadata.prev.length !== 0) {
// prettier-ignore
return new Error('invalid message: prev of 1st msg must be an empty array, on feed: ' + msg.metadata.who);
return new Error('invalid message: tangle root must not have self tangle data, on feed: ' + msg.metadata.who);
}
}
@ -152,7 +149,7 @@ function validateContent(msg) {
// FIXME: if content does not exist, do nothing
const { content } = msg
if (!content) {
return new Error('invalid message: must have content')
return
}
if (Array.isArray(content)) {
return new Error('invalid message: content must not be an array')
@ -165,48 +162,28 @@ function validateContent(msg) {
// FIXME: validateDepth should be +1 of the max of prev depth
function validateSync(msg, existingMsgs) {
function validateSync(msg, existingMsgs, msgHash, rootHash) {
let err
if ((err = validateShape(msg))) return err
if ((err = validateWho(msg))) return err
if ((err = validateWhen(msg))) return err
if (msg.metadata.depth === 0) {
if ((err = validateFirstPrev(msg))) return err
if (msgHash === rootHash) {
if ((err = validateTangleRoot(msg))) return err
} else {
if ((err = validatePrev(msg, existingMsgs))) return err
if ((err = validateTangle(msg, existingMsgs, rootHash))) return err
}
if ((err = validateContent(msg))) return err
if ((err = validateSignature(msg))) return err
}
// function validateOOOSync(nativeMsg, hmacKey) {
// let err
// if ((err = validateShape(nativeMsg))) return err
// if ((err = validateHmac(hmacKey))) return err
// if ((err = validateAuthor(nativeMsg))) return err
// if ((err = validateHash(nativeMsg))) return err
// if ((err = validateOrder(nativeMsg))) return err
// if ((err = validateContent(nativeMsg))) return err
// if ((err = validateAsJSON(nativeMsg))) return err
// if ((err = validateSignature(nativeMsg, hmacKey))) return err
// }
function validate(msg, existingMsgs, cb) {
function validate(msg, existingMsgs, msgHash, rootHash, cb) {
let err
if ((err = validateSync(msg, existingMsgs))) {
if ((err = validateSync(msg, existingMsgs, msgHash, rootHash))) {
return cb(err)
}
cb()
}
// function validateOOO(nativeMsg, hmacKey, cb) {
// let err
// if ((err = validateOOOSync(nativeMsg, hmacKey))) {
// return cb(err)
// }
// cb()
// }
// function validateBatch(nativeMsgs, prevNativeMsg, hmacKey, cb) {
// let err
// let prev = prevNativeMsg
@ -218,15 +195,6 @@ function validate(msg, existingMsgs, cb) {
// cb()
// }
// function validateOOOBatch(nativeMsgs, hmacKey, cb) {
// let err
// for (const nativeMsg of nativeMsgs) {
// err = validateOOOSync(nativeMsg, hmacKey)
// if (err) return cb(err)
// }
// cb()
// }
module.exports = {
validateType,
validateContent,
@ -234,6 +202,4 @@ module.exports = {
validate,
validateMsgHash,
// validateBatch,
// validateOOO,
// validateOOOBatch,
}
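
The validator's entry point gains two arguments: the hash of the message being validated and the hash of the tangle root it claims to belong to. When the two are equal, the message is treated as the tangle root (validateTangleRoot); otherwise its depth/prev are checked against the other messages in that tangle (validateTangle). A sketch of both calls, matching the tests in this commit:

// Root message of a feed tangle: msgHash === rootHash
FeedV1.validate(rootMsg, existing, rootHash, rootHash, (err) => {
  // err is undefined when the root is valid
})

// Any later message in the same tangle:
FeedV1.validate(msg1, existing, msgHash1, rootHash, (err) => {
  // checks msg1.metadata.tangles[rootHash].prev/depth against `existing`
})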

View File

@ -4,7 +4,7 @@ const AAOL = require('async-append-only-log')
const promisify = require('promisify-4loc')
const Obz = require('obz')
const FeedV1 = require('./feed-v1')
const { ReadyGate } = require('./utils')
const { ReadyGate, isEmptyObject } = require('./utils')
const { decrypt } = require('./encryption')
/**
@ -48,33 +48,6 @@ exports.init = function initDB(peer, config) {
const encryptionFormats = new Map()
const onRecordAdded = Obz()
const msgsPerFeed = {
_mapAll: new Map(), // who => Set<MsgHash>
_byHash: new Map(), // msgId => Msg // TODO: optimize space usage of this??
update(msg, msgHash) {
const feedId = FeedV1.getFeedId(msg)
const setAll = this._mapAll.get(feedId) ?? new Set()
setAll.add(msgHash)
this._mapAll.set(feedId, setAll)
this._byHash.set(msgHash, msg)
},
getAll(feedId) {
const map = new Map()
for (const msgHash of this._mapAll.get(feedId) ?? []) {
const msg = this._byHash.get(msgHash)
if (msg) map.set(msgHash, msg)
}
return map
},
deleteMsg(msg) {
const feedId = FeedV1.getFeedId(msg)
const msgHash = FeedV1.getMsgHash(msg)
const setAll = this._mapAll.get(feedId)
setAll.delete(msgHash)
this._byHash.delete(msgHash)
},
}
const log = AAOL(path.join(config.path, 'db.bin'), {
cacheSize: 1,
blockSize: 64 * 1024,
@ -124,8 +97,6 @@ exports.init = function initDB(peer, config) {
rec.misc.size = size
rec.misc.seq = i
recs.push(rec)
msgsPerFeed.update(rec.msg)
},
function drainEnd(err) {
// prettier-ignore
@ -175,20 +146,17 @@ exports.init = function initDB(peer, config) {
return encryptionFormat
}
function add(msg, cb) {
const feedId = FeedV1.getFeedId(msg)
function add(msg, tangleRootHash, cb) {
// TODO: optimize this. This may be slow if you're adding many msgs in a
// row, because `getAll()` creates a new Map() each time.
const existingMsgs = msgsPerFeed.getAll(feedId)
// row, because it creates a new Map() each time.
const tangleMsgs = populateTangle(tangleRootHash)
FeedV1.validate(msg, existingMsgs, validationCB)
const msgHash = FeedV1.getMsgHash(msg)
FeedV1.validate(msg, tangleMsgs, msgHash, tangleRootHash, validationCB)
function validationCB(err) {
// prettier-ignore
if (err) return cb(new Error('add() failed validation for feed format v1', {cause: err}))
const msgHash = FeedV1.getMsgHash(msg)
msgsPerFeed.update(msg, msgHash)
logAppend(msgHash, msg, logAppendCB)
}
@ -199,6 +167,16 @@ exports.init = function initDB(peer, config) {
}
}
function getFeedRoot(findWho, findType) {
for (const rec of records()) {
const { who, type, tangles } = rec.msg.metadata
if (who === findWho && type === findType && isEmptyObject(tangles)) {
return rec.hash
}
}
return null
}
function populateTangle(tangleId) {
const map = new Map()
for (const rec of records()) {
@ -212,7 +190,7 @@ exports.init = function initDB(peer, config) {
function populateTangles(tangleIds) {
const tangles = {}
for (const tangleId of tangleIds) {
tangles[tangleId] = populateTangle(tangleId)
tangles[tangleId] ??= populateTangle(tangleId)
}
return tangles
}
@ -230,23 +208,22 @@ exports.init = function initDB(peer, config) {
if (!opts.content) return cb(new Error('create() requires a `content`'))
if (!opts.type) return cb(new Error('create() requires a `type`'))
// Create full opts:
const tangles = populateTangles(opts.tangles ?? [])
let tempMsg
try {
tempMsg = FeedV1.create({
when: Date.now(),
...opts,
tangles,
existing: [],
keys,
const feedRootHash = getFeedRoot(FeedV1.stripAuthor(keys.id), opts.type)
if (!feedRootHash) {
const feedRoot = FeedV1.createRoot(keys, opts.type)
add(feedRoot, FeedV1.getMsgHash(feedRoot), (err) => {
// prettier-ignore
if (err) return cb(new Error('create() failed to create root', {cause: err}));
create(opts, cb)
})
} catch (err) {
return cb(new Error('create() failed', { cause: err }))
return
}
const feedId = FeedV1.getFeedId(tempMsg)
const existing = msgsPerFeed.getAll(feedId)
const fullOpts = { when: Date.now(), ...opts, tangles, existing, keys }
// Fill-in tangle opts:
const tangleTemplates = opts.tangles ?? []
tangleTemplates.push(feedRootHash)
const tangles = populateTangles(tangleTemplates)
const fullOpts = { when: Date.now(), ...opts, tangles, keys }
// If opts ask for encryption, encrypt and put ciphertext in opts.content
const recps = fullOpts.content.recps
@ -276,7 +253,6 @@ exports.init = function initDB(peer, config) {
return cb(new Error('create() failed', { cause: err }))
}
const msgHash = FeedV1.getMsgHash(msg)
msgsPerFeed.update(msg, msgHash)
// Encode the native message and append it to the log:
logAppend(msgHash, msg, (err, rec) => {
@ -289,7 +265,6 @@ exports.init = function initDB(peer, config) {
function del(msgId, cb) {
const rec = getRecord(msgId)
msgsPerFeed.deleteMsg(rec.msg)
const { offset, size, seq } = rec.misc
recs[rec.misc.seq] = { misc: { offset, size, seq } }
log.onDrain(() => {
@ -317,7 +292,7 @@ exports.init = function initDB(peer, config) {
const rec = recs[i]
if (!rec) continue
if (isUri && msgId.endsWith(rec.hash)) return rec
else if (!isUri && rec.hash.endsWith(msgId)) return rec
else if (!isUri && rec.hash === msgId) return rec
}
return null
}
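
A minimal sketch of the updated db API, mirroring the tests further down in this commit (`p` is promisify-4loc as used in the tests; nextMsg stands for any message created with FeedV1.create against the same root):

const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)

// add() now takes the tangle root hash as its second argument:
await p(peer.db.add)(rootMsg, rootHash)
await p(peer.db.add)(nextMsg, rootHash)

// create() keeps its caller-facing signature; it lazily creates and adds the
// feed root the first time a (who, type) pair is seen, then tangles the new
// message onto it:
const rec = await p(peer.db.create)({ type: 'post', content: { text: 'hi' } })
// rec.msg.metadata.tangles[rootHash] => { depth: 1, prev: [rootHash] }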

View File

@ -18,4 +18,12 @@ class ReadyGate {
}
}
module.exports = { ReadyGate }
function isEmptyObject(obj) {
for (const _key in obj) {
return false
}
return true
}
module.exports = { ReadyGate, isEmptyObject }

View File

@ -20,15 +20,23 @@ test('add()', async (t) => {
await peer.db.loaded()
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const recRoot = await p(peer.db.add)(rootMsg, rootHash)
t.equals(recRoot.msg.metadata.when, 0, 'root msg added')
const inputMsg = FeedV1.create({
keys,
when: 1514517067954,
type: 'post',
content: { text: 'This is the first post!' },
existing: [],
tangles: {
[rootHash]: new Map([[FeedV1.getMsgHash(rootMsg), rootMsg]]),
},
})
const rec = await p(peer.db.add)(inputMsg)
const rec = await p(peer.db.add)(inputMsg, rootHash)
t.equal(rec.msg.content.text, 'This is the first post!')
await p(peer.close)(true)

View File

@ -23,6 +23,8 @@ test('setup', async (t) => {
await peer.db.loaded()
})
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
let msgHash1
let rec1
let msgHash2
@ -32,6 +34,17 @@ test('create()', async (t) => {
content: { text: 'I am 1st post' },
})
t.equal(rec1.msg.content.text, 'I am 1st post', 'msg1 text correct')
t.equal(
rec1.msg.metadata.tangles[rootHash].depth,
1,
'msg1 tangle depth correct'
)
t.deepEquals(
rec1.msg.metadata.tangles[rootHash].prev,
[rootHash],
'msg1 tangle prev correct'
)
msgHash1 = FeedV1.getMsgHash(rec1.msg)
const rec2 = await p(peer.db.create)({
@ -39,7 +52,16 @@ test('create()', async (t) => {
content: { text: 'I am 2nd post' },
})
t.equal(rec2.msg.content.text, 'I am 2nd post', 'msg2 text correct')
t.deepEquals(rec2.msg.metadata.prev, [msgHash1], 'msg2 prev correct')
t.equal(
rec2.msg.metadata.tangles[rootHash].depth,
2,
'msg2 tangle depth correct'
)
t.deepEquals(
rec2.msg.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2 tangle prev correct'
)
msgHash2 = FeedV1.getMsgHash(rec2.msg)
})
@ -49,10 +71,15 @@ test('add() forked then create() merged', async (t) => {
when: Date.now(),
type: 'post',
content: { text: '3rd post forked from 1st' },
existing: [rec1.msg],
tangles: {
[rootHash]: new Map([
[rootHash, rootMsg],
[rec1.hash, rec1.msg],
]),
},
})
const rec3 = await p(peer.db.add)(msg3)
const rec3 = await p(peer.db.add)(msg3, rootHash)
const msgHash3 = FeedV1.getMsgHash(rec3.msg)
const rec4 = await p(peer.db.create)({
@ -61,9 +88,9 @@ test('add() forked then create() merged', async (t) => {
})
t.ok(rec4, '4th post created')
t.deepEquals(
rec4.msg.metadata.prev,
[msgHash2, msgHash3],
'msg4 prev is msg2 and msg3'
rec4.msg.metadata.tangles[rootHash].prev,
[rootHash, msgHash2, msgHash3],
'msg4 prev is root, msg2 and msg3'
)
})

View File

@ -31,7 +31,7 @@ test('del', async (t) => {
const before = []
for (const msg of peer.db.msgs()) {
before.push(msg.content.text)
if (msg.content) before.push(msg.content.text)
}
t.deepEqual(before, ['m0', 'm1', 'm2', 'm3', 'm4'], 'msgs before the delete')
@ -40,7 +40,7 @@ test('del', async (t) => {
const after = []
for (const msg of peer.db.msgs()) {
after.push(msg.content.text)
if (msg.content) after.push(msg.content.text)
}
t.deepEqual(after, ['m0', 'm1', 'm3', 'm4'], 'msgs after the delete')
@ -78,7 +78,7 @@ test('del', async (t) => {
})
t.deepEqual(
persistedMsgs.map((msg) => msg.content.text),
persistedMsgs.filter((msg) => msg.content).map((msg) => msg.content.text),
['m0', 'm1', 'm3', 'm4'],
'msgs in disk after the delete'
)

View File

@ -2,6 +2,24 @@ const tape = require('tape')
const FeedV1 = require('../lib/feed-v1')
const { generateKeypair } = require('./util')
let rootMsg = null
let rootHash = null
tape('FeedV1.createRoot()', (t) => {
const keys = generateKeypair('alice')
rootMsg = FeedV1.createRoot(keys, 'post')
t.equals(rootMsg.content, null, 'content')
t.equals(rootMsg.metadata.proof, '', 'proof')
t.equals(rootMsg.metadata.size, 0, 'size')
t.equals(rootMsg.metadata.type, 'post', 'type')
t.equals(rootMsg.metadata.who, FeedV1.stripAuthor(keys.id), 'who')
t.equals(rootMsg.metadata.when, 0, 'when')
t.deepEquals(rootMsg.metadata.tangles, {}, 'tangles')
rootHash = FeedV1.getMsgHash(rootMsg)
t.equals(rootHash, 'PpkBfa8C4sB8wHrqiNmHqe', 'root hash')
t.end()
})
tape('FeedV1.create()', (t) => {
const keys = generateKeypair('alice')
const content = { text: 'Hello world!' }
@ -11,12 +29,14 @@ tape('FeedV1.create()', (t) => {
keys,
content,
type: 'post',
existing: [],
tangles: {
[rootHash]: new Map([[rootHash, rootMsg]]),
},
when,
})
t.deepEquals(
Object.keys(msg1.metadata),
['depth', 'prev', 'proof', 'size', 'type', 'who', 'when'],
['proof', 'size', 'tangles', 'type', 'who', 'when'],
'metadata fields'
)
t.equals(
@ -25,16 +45,17 @@ tape('FeedV1.create()', (t) => {
'metadata.who'
)
t.equals(msg1.metadata.type, 'post', 'metadata.type')
t.equals(msg1.metadata.depth, 0, 'metadata.depth')
t.deepEquals(msg1.metadata.prev, [], 'metadata.prev')
t.deepEquals(msg1.metadata.proof, '9R7XmBhHF5ooPg34j9TQcz', 'metadata.proof')
t.deepEquals(Object.keys(msg1.metadata.tangles), [rootHash], 'tangles')
t.equals(msg1.metadata.tangles[rootHash].depth, 1, 'tangle depth')
t.deepEquals(msg1.metadata.tangles[rootHash].prev, [rootHash], 'tangle prev')
t.deepEquals(msg1.metadata.size, 23, 'metadata.size')
t.equals(typeof msg1.metadata.when, 'number', 'metadata.when')
t.deepEqual(msg1.content, content, 'content is correct')
console.log(msg1)
const msgHash1 = '9cYegpVpddoMSdvSf53dTH'
const msgHash1 = 'YWbEeMtcU4eNwF6uJVTrKE'
t.equals(
FeedV1.getMsgId(msg1),
@ -49,12 +70,17 @@ tape('FeedV1.create()', (t) => {
keys,
content: content2,
type: 'post',
existing: new Map([[msgHash1, msg1]]),
tangles: {
[rootHash]: new Map([
[rootHash, rootMsg],
[msgHash1, msg1],
]),
},
when: when + 1,
})
t.deepEquals(
Object.keys(msg2.metadata),
['depth', 'prev', 'proof', 'size', 'type', 'who', 'when'],
['proof', 'size', 'tangles', 'type', 'who', 'when'],
'metadata keys'
)
t.equals(
@ -63,8 +89,9 @@ tape('FeedV1.create()', (t) => {
'metadata.who'
)
t.equals(msg2.metadata.type, 'post', 'metadata.type')
t.equals(msg2.metadata.depth, 1, 'metadata.depth')
t.deepEquals(msg2.metadata.prev, [msgHash1], 'metadata.prev')
t.deepEquals(Object.keys(msg1.metadata.tangles), [rootHash], 'tangles')
t.equals(msg2.metadata.tangles[rootHash].depth, 2, 'tangle depth')
t.deepEquals(msg2.metadata.tangles[rootHash].prev, [msgHash1], 'tangle prev')
t.deepEquals(msg2.metadata.proof, 'XuZEzH1Dhy1yuRMcviBBcN', 'metadata.proof')
t.deepEquals(msg2.metadata.size, 21, 'metadata.size')
t.equals(typeof msg2.metadata.when, 'number', 'metadata.when')
@ -74,7 +101,7 @@ tape('FeedV1.create()', (t) => {
t.deepEqual(
FeedV1.getMsgId(msg2),
'ppppp:message/v1/4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW/post/LEH1JVENvJgSpBBrVUwJx6',
'ppppp:message/v1/4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW/post/R9XRXBL1ntSKRrrk86bhn8',
'getMsgId'
)
@ -84,17 +111,23 @@ tape('FeedV1.create()', (t) => {
tape('create() handles DAG tips correctly', (t) => {
const keys = generateKeypair('alice')
const when = 1652037377204
const existing = new Map()
const existing = new Map([[rootHash, rootMsg]])
const msg1 = FeedV1.create({
keys,
content: { text: '1' },
type: 'post',
existing: new Map(),
tangles: {
[rootHash]: existing,
},
when: when + 1,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
t.deepEquals(msg1.metadata.prev, [], 'msg1.prev is empty')
t.deepEquals(
msg1.metadata.tangles[rootHash].prev,
['PpkBfa8C4sB8wHrqiNmHqe'],
'msg1.prev is root'
)
existing.set(msgHash1, msg1)
@ -102,20 +135,33 @@ tape('create() handles DAG tips correctly', (t) => {
keys,
content: { text: '2A' },
type: 'post',
existing,
tangles: {
[rootHash]: existing,
},
when: when + 2,
})
t.deepEquals(msg2A.metadata.prev, [msgHash1], 'msg2A.prev is msg1')
t.deepEquals(
msg2A.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2A.prev is msg1'
)
const msg2B = FeedV1.create({
keys,
content: { text: '2B' },
type: 'post',
tangles: {
[rootHash]: existing,
},
existing,
when: when + 2,
})
const msgHash2B = FeedV1.getMsgHash(msg2B)
t.deepEquals(msg2B.metadata.prev, [msgHash1], 'msg2B.prev is msg1')
t.deepEquals(
msg2B.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2B.prev is msg1'
)
existing.set(msgHash2B, msg2B)
@ -123,11 +169,17 @@ tape('create() handles DAG tips correctly', (t) => {
keys,
content: { text: '3' },
type: 'post',
existing,
tangles: {
[rootHash]: existing,
},
when: when + 3,
})
const msgHash3 = FeedV1.getMsgHash(msg3)
t.deepEquals(msg3.metadata.prev, [msgHash2B], 'msg3.prev is msg2B')
t.deepEquals(
msg3.metadata.tangles[rootHash].prev,
[rootHash, msgHash2B],
'msg3.prev is root(lipmaa),msg2B(previous)'
)
existing.set(msgHash3, msg3)
const msgHash2A = FeedV1.getMsgHash(msg2A)
@ -138,13 +190,15 @@ tape('create() handles DAG tips correctly', (t) => {
keys,
content: { text: '4' },
type: 'post',
existing,
tangles: {
[rootHash]: existing,
},
when: when + 4,
})
t.deepEquals(
msg4.metadata.prev,
[msgHash1, msgHash3, msgHash2A],
'msg4.prev is [msg1(lipmaa),msg3(previous),msg2A(old fork as tip)]'
msg4.metadata.tangles[rootHash].prev,
[msgHash3, msgHash2A],
'msg4.prev is [msg3(previous),msg2A(old fork as tip)]'
)
t.end()

View File

@ -3,84 +3,67 @@ const base58 = require('bs58')
const FeedV1 = require('../lib/feed-v1')
const { generateKeypair } = require('./util')
tape('invalid 1st msg with non-array prev', (t) => {
tape('invalid msg with non-array prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const existing = new Map([[rootHash, rootMsg]])
const msg = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
tangles: {
[rootHash]: existing,
},
when: 1652030001000,
})
msg.metadata.prev = null
msg.metadata.tangles[rootHash].prev = null
const msgHash = FeedV1.getMsgHash(msg)
FeedV1.validate(msg, new Map(), (err) => {
FeedV1.validate(msg, existing, msgHash, rootHash, (err) => {
t.ok(err, 'invalid 2nd msg throws')
t.match(err.message, /prev must be an array/, 'invalid 2nd msg description')
t.end()
})
})
tape('invalid msg with non-array prev', (t) => {
const keys = generateKeypair('alice')
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
when: 1652030002000,
})
msg2.metadata.prev = null
const existing = new Map()
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
t.ok(err, 'invalid 2nd msg throws')
t.match(
err.message,
/prev must be an array/,
'invalid 2nd msg description'
)
t.end()
})
})
tape('invalid msg with bad prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const existing = new Map([[rootHash, rootMsg]])
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
tangles: {
[rootHash]: existing,
},
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
existing.set(msgHash1, msg1)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
tangles: {
[rootHash]: existing,
},
when: 1652030002000,
})
msg2.metadata.depth = 1
msg2.metadata.prev = [1234]
msg2.metadata.tangles[rootHash].depth = 1
msg2.metadata.tangles[rootHash].prev = [1234]
const msgHash2 = FeedV1.getMsgHash(msg2)
const existing = new Map()
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
FeedV1.validate(msg2, existing, msgHash2, rootHash, (err) => {
t.ok(err, 'invalid 2nd msg throws')
t.match(
err.message,
@ -94,30 +77,39 @@ tape('invalid msg with bad prev', (t) => {
tape('invalid msg with URI in prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const existing = new Map([[rootHash, rootMsg]])
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
tangles: {
[rootHash]: existing,
},
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
existing.set(msgHash1, msg1)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
tangles: {
[rootHash]: existing,
},
when: 1652030002000,
})
const msgHash2 = FeedV1.getMsgHash(msg2)
const randBuf = Buffer.alloc(16).fill(16)
const fakeMsgKey1 = `ppppp:message/v1/${base58.encode(randBuf)}`
msg2.metadata.depth = 1
msg2.metadata.prev = [fakeMsgKey1]
msg2.metadata.tangles[rootHash].depth = 1
msg2.metadata.tangles[rootHash].prev = [fakeMsgKey1]
const existing = new Map()
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
FeedV1.validate(msg2, existing, msgHash2, rootHash, (err) => {
t.ok(err, 'invalid 2nd msg throws')
t.match(
err.message,
@ -131,20 +123,30 @@ tape('invalid msg with URI in prev', (t) => {
tape('invalid msg with unknown prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const existing = new Map([[rootHash, rootMsg]])
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
tangles: {
[rootHash]: existing,
},
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
existing.set(msgHash1, msg1)
const unknownMsg = FeedV1.create({
keys,
content: { text: 'Alien' },
type: 'post',
existing: new Map(),
tangles: {
[rootHash]: existing,
},
when: 1652030001000,
})
const unknownMsgHash = FeedV1.getMsgHash(unknownMsg)
@ -153,13 +155,14 @@ tape('invalid msg with unknown prev', (t) => {
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map([[unknownMsgHash, unknownMsg]]),
tangles: {
[rootHash]: new Map([[rootHash, rootMsg], [unknownMsgHash, unknownMsg]]),
},
when: 1652030002000,
})
const msgHash2 = FeedV1.getMsgHash(msg2)
const existing = new Map()
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
FeedV1.validate(msg2, existing, msgHash2, rootHash, (err) => {
t.ok(err, 'invalid 2nd msg throws')
t.match(
err.message,

View File

@ -12,7 +12,6 @@ tape('invalid type not a string', (t) => {
content: { text: 'Hello world!' },
when: 1652037377204,
type: 123,
existing: new Map(),
})
},
/type is not a string/,
@ -31,7 +30,6 @@ tape('invalid type with "/" character', (t) => {
content: { text: 'Hello world!' },
when: 1652037377204,
type: 'group/init',
existing: new Map(),
})
},
/invalid type/,
@ -50,7 +48,6 @@ tape('invalid type with "*" character', (t) => {
content: { text: 'Hello world!' },
when: 1652037377204,
type: 'star*',
existing: new Map(),
})
},
/invalid type/,
@ -69,7 +66,6 @@ tape('invalid type too short', (t) => {
content: { text: 'Hello world!' },
when: 1652037377204,
type: 'xy',
existing: new Map(),
})
},
/shorter than 3/,
@ -88,7 +84,6 @@ tape('invalid type too long', (t) => {
content: { text: 'Hello world!' },
when: 1652037377204,
type: 'a'.repeat(120),
existing: new Map(),
})
},
/100\+ characters long/,

View File

@ -8,64 +8,132 @@ tape('lipmaa prevs', (t) => {
const when = 1652037377204
const existing = new Map()
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
existing.set(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content,
type: 'post',
existing: new Map(),
tangles: {
[rootHash]: existing,
},
when: when + 1,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
existing.set(msgHash1, msg1)
t.deepEquals(msg1.metadata.prev, [], 'msg1.prev is empty')
t.equals(msg1.metadata.tangles[rootHash].depth, 1, 'msg1 depth')
t.deepEquals(msg1.metadata.tangles[rootHash].prev, [rootHash], 'msg1 prev')
const msg2 = FeedV1.create({
keys,
content,
type: 'post',
existing,
tangles: {
[rootHash]: existing,
},
when: when + 2,
})
const msgHash2 = FeedV1.getMsgHash(msg2)
existing.set(msgHash2, msg2)
t.deepEquals(msg2.metadata.prev, [msgHash1], 'msg2.prev is msg1')
t.equals(msg2.metadata.tangles[rootHash].depth, 2, 'msg2 depth')
t.deepEquals(
msg2.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2 prev'
)
const msg3 = FeedV1.create({
keys,
content,
type: 'post',
existing,
tangles: {
[rootHash]: existing,
},
when: when + 3,
})
const msgHash3 = FeedV1.getMsgHash(msg3)
existing.set(msgHash3, msg3)
t.deepEquals(msg3.metadata.prev, [msgHash2], 'msg3.prev is msg2')
t.equals(msg3.metadata.tangles[rootHash].depth, 3, 'msg3 depth')
t.deepEquals(
msg3.metadata.tangles[rootHash].prev,
[rootHash, msgHash2],
'msg3 prev (has lipmaa!)'
)
const msg4 = FeedV1.create({
keys,
content,
type: 'post',
existing,
tangles: {
[rootHash]: existing,
},
when: when + 4,
})
const msgHash4 = FeedV1.getMsgHash(msg4)
existing.set(msgHash4, msg4)
t.equals(msg4.metadata.tangles[rootHash].depth, 4, 'msg4 depth')
t.deepEquals(
msg4.metadata.prev,
[msgHash1, msgHash3],
'msg4.prev is msg1 and msg3'
msg4.metadata.tangles[rootHash].prev,
[msgHash3],
'msg4 prev'
)
const msg5 = FeedV1.create({
keys,
content,
type: 'post',
existing,
tangles: {
[rootHash]: existing,
},
when: when + 5,
})
const msgHash5 = FeedV1.getMsgHash(msg5)
existing.set(msgHash5, msg5)
t.deepEquals(msg5.metadata.prev, [msgHash4], 'msg5.prev is msg4')
t.equals(msg5.metadata.tangles[rootHash].depth, 5, 'msg5 depth')
t.deepEquals(
msg5.metadata.tangles[rootHash].prev,
[msgHash4],
'msg5 prev'
)
const msg6 = FeedV1.create({
keys,
content,
type: 'post',
tangles: {
[rootHash]: existing,
},
when: when + 6,
})
const msgHash6 = FeedV1.getMsgHash(msg6)
existing.set(msgHash6, msg6)
t.equals(msg6.metadata.tangles[rootHash].depth, 6, 'msg6 depth')
t.deepEquals(
msg6.metadata.tangles[rootHash].prev,
[msgHash5],
'msg6 prev'
)
const msg7 = FeedV1.create({
keys,
content,
type: 'post',
tangles: {
[rootHash]: existing,
},
when: when + 7,
})
const msgHash7 = FeedV1.getMsgHash(msg7)
existing.set(msgHash7, msg7)
t.equals(msg7.metadata.tangles[rootHash].depth, 7, 'msg7 depth')
t.deepEquals(
msg7.metadata.tangles[rootHash].prev,
[msgHash3, msgHash6],
'msg7 prev (has lipmaa!)'
)
t.end()
})

View File

@ -5,16 +5,32 @@ const { generateKeypair } = require('./util')
tape('simple multi-author tangle', (t) => {
const keysA = generateKeypair('alice')
const keysB = generateKeypair('bob')
const existingA = new Map()
const existingB = new Map()
const rootMsgA = FeedV1.createRoot(keysA, 'post')
const rootHashA = FeedV1.getMsgHash(rootMsgA)
existingA.set(rootHashA, rootMsgA)
const rootMsgB = FeedV1.createRoot(keysB, 'post')
const rootHashB = FeedV1.getMsgHash(rootMsgB)
existingB.set(rootHashB, rootMsgB)
const msg1 = FeedV1.create({
keys: keysA,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
tangles: {
[rootHashA]: existingA,
},
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
t.notOk(msg1.metadata.tangles, 'msg1 has no extra tangles')
t.deepEquals(
Object.keys(msg1.metadata.tangles),
[rootHashA],
'msg1 has only feed tangle'
)
const msg2 = FeedV1.create({
keys: keysB,
@ -22,12 +38,24 @@ tape('simple multi-author tangle', (t) => {
type: 'post',
existing: new Map(),
tangles: {
[rootHashB]: existingB,
[msgHash1]: new Map([[msgHash1, msg1]]),
},
when: 1652030002000,
})
t.ok(msg2.metadata.tangles, 'msg2 has extra tangles')
t.ok(msg2.metadata.tangles[msgHash1], 'msg2 has tangle for msgHash1')
t.deepEquals(
Object.keys(msg2.metadata.tangles),
[rootHashB, msgHash1],
'msg2 has feed tangle and misc tangle'
)
t.equal(msg2.metadata.tangles[rootHashB].depth, 1, 'msg2 feed tangle depth')
t.deepEquals(
msg2.metadata.tangles[rootHashB].prev,
[rootHashB],
'msg2 feed tangle prev'
)
t.equal(msg2.metadata.tangles[msgHash1].depth, 1, 'msg2 has tangle depth 1')
t.deepEquals(
msg2.metadata.tangles[msgHash1].prev,
@ -48,18 +76,29 @@ tape('lipmaa in multi-author tangle', (t) => {
const existingB = new Map()
const tangleExisting = new Map()
const rootMsgA = FeedV1.createRoot(keysA, 'post')
const rootHashA = FeedV1.getMsgHash(rootMsgA)
existingA.set(rootHashA, rootMsgA)
const rootMsgB = FeedV1.createRoot(keysB, 'post')
const rootHashB = FeedV1.getMsgHash(rootMsgB)
existingB.set(rootHashB, rootMsgB)
const msg1 = FeedV1.create({
keys: keysA,
content,
type: 'post',
existing: existingA,
tangles: {
[rootHashA]: existingA,
},
when: when + 1,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
existingA.set(msgHash1, msg1)
tangleExisting.set(msgHash1, msg1)
t.notOk(msg1.metadata.tangles, 'A:msg1 has no extra tangles')
t.deepEquals(Object.keys(msg1.metadata.tangles),[rootHashA], 'A:msg1 has only feed tangle')
const msg2 = FeedV1.create({
keys: keysB,
@ -67,6 +106,7 @@ tape('lipmaa in multi-author tangle', (t) => {
type: 'post',
existing: existingB,
tangles: {
[rootHashB]: existingB,
[msgHash1]: tangleExisting,
},
when: when + 2,
@ -87,6 +127,7 @@ tape('lipmaa in multi-author tangle', (t) => {
type: 'post',
existing: existingB,
tangles: {
[rootHashB]: existingB,
[msgHash1]: tangleExisting,
},
when: when + 3,
@ -107,6 +148,7 @@ tape('lipmaa in multi-author tangle', (t) => {
type: 'post',
existing: existingA,
tangles: {
[rootHashA]: existingA,
[msgHash1]: tangleExisting,
},
when: when + 4,

View File

@ -3,106 +3,42 @@ const base58 = require('bs58')
const FeedV1 = require('../lib/feed-v1')
const { generateKeypair } = require('./util')
tape('validate 1st msg', (t) => {
tape('validate root msg', (t) => {
const keys = generateKeypair('alice')
const existing = new Map()
const msg = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
when: 1652030001000,
})
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
existing.set(rootHash, rootMsg)
FeedV1.validate(msg, [], (err) => {
FeedV1.validate(rootMsg, existing, rootHash, rootHash, (err) => {
if (err) console.log(err)
t.error(err, 'valid 1st msg')
t.error(err, 'valid root msg')
t.end()
})
})
tape('validate 2nd msg with existing nativeMsg', (t) => {
tape('validate 2nd msg with existing root', (t) => {
const keys = generateKeypair('alice')
const existing = new Map()
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
existing.set(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
tangles: {
[rootHash]: existing,
},
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map([[msgHash1, msg1]]),
when: 1652030002000,
})
const existing = new Map()
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
if (err) console.log(err)
t.error(err, 'valid 2nd msg')
t.end()
})
})
tape('validate 2nd msg with existing msgId', (t) => {
const keys = generateKeypair('alice')
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
prev: [],
existing: new Map(),
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map([[msgHash1, msg1]]),
when: 1652030002000,
})
const existing = new Map()
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
if (err) console.log(err)
t.error(err, 'valid 2nd msg')
t.end()
})
})
tape('validate 2nd msg with existing KVT', (t) => {
const keys = generateKeypair('alice')
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map([[msgHash1, msg1]]),
when: 1652030002000,
})
const existing = new Map()
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
FeedV1.validate(msg1, existing, msgHash1, rootHash, (err) => {
if (err) console.log(err)
t.error(err, 'valid 2nd msg')
t.end()
@ -112,36 +48,38 @@ tape('validate 2nd msg with existing KVT', (t) => {
tape('validate 2nd forked msg', (t) => {
const keys = generateKeypair('alice')
const msg1 = FeedV1.create({
const existing = new Map()
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
existing.set(rootHash, rootMsg)
const msg1A = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: existing,
},
existing: new Map(),
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
const msgHash1A = FeedV1.getMsgHash(msg1A)
const msg2A = FeedV1.create({
const msg1B = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map([[msgHash1, msg1]]),
tangles: {
[rootHash]: existing,
},
when: 1652030002000,
})
const msgHash2A = FeedV1.getMsgHash(msg2A)
const msgHash1B = FeedV1.getMsgHash(msg1B)
const msg2B = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map([[msgHash1, msg1]]),
when: 1652030003000,
})
const existing = new Map()
existing.set(msgHash1, msg1)
existing.set(msgHash2A, msg2A)
FeedV1.validate(msg2B, existing, (err) => {
existing.set(msgHash1A, msg1A)
existing.set(msgHash1B, msg1B)
FeedV1.validate(msg1B, existing, msgHash1B, rootHash, (err) => {
if (err) console.log(err)
t.error(err, 'valid 2nd forked msg')
t.end()
@ -151,29 +89,28 @@ tape('validate 2nd forked msg', (t) => {
tape('invalid msg with unknown previous', (t) => {
const keys = generateKeypair('alice')
const existing = new Map()
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
existing.set(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map(),
tangles: {
[rootHash]: existing,
},
when: 1652030001000,
})
const msgHash1 = FeedV1.getMsgHash(msg1)
const fakeMsgKey1 = base58.encode(Buffer.alloc(16).fill(42))
const fakeMsgHash = base58.encode(Buffer.alloc(16).fill(42))
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
existing: new Map([[msgHash1, msg1]]),
when: 1652030002000,
})
msg2.metadata.prev = [fakeMsgKey1]
msg1.metadata.tangles[rootHash].prev = [fakeMsgHash]
const existing = new Map()
existing.set(msgHash1, msg1)
FeedV1.validate(msg2, existing, (err) => {
FeedV1.validate(msg1, existing, msgHash1, rootHash, (err) => {
t.ok(err, 'invalid 2nd msg throws')
t.match(
err.message,

View File

@ -31,6 +31,7 @@ test('msgs() iterator', async (t) => {
const posts = []
const abouts = []
for (const msg of peer.db.msgs()) {
if (!msg.content) continue
if (msg.metadata.type === 'post') posts.push(msg.content.text)
else if (msg.metadata.type === 'about') abouts.push(msg.content.name)
}

View File

@ -31,8 +31,10 @@ test('onRecordAdded', async (t) => {
await p(setTimeout)(500)
t.equal(listened.length, 1)
t.deepEquals(listened, [rec1])
t.equal(listened.length, 2)
t.deepEquals(listened[0].msg.content, null, 'root')
t.deepEquals(listened[0].msg.metadata.when, 0, 'root')
t.deepEquals(listened[1], rec1, 'actual record')
remove()
await p(peer.close)(true)

View File

@ -46,6 +46,7 @@ test('create some msgs, close, re-open', async (t) => {
const texts = []
for (const msg of peer2.db.msgs()) {
if (!msg.content) continue
texts.push(msg.content.text)
}

View File

@ -30,6 +30,7 @@ test('records() iterator', async (t) => {
let count = 0
for (const rec of peer.db.records()) {
if (!rec.msg.content) continue
t.true(rec.misc.size > rec.msg.metadata.size)
count++
}