Compare commits

...

145 Commits
rev1 ... master

Author SHA1 Message Date
Jacob Karlsson f79914d838 1.0.4 2024-06-05 17:26:03 +02:00
Jacob Karlsson 37022b8969 Export DBTangle type 2024-06-05 17:25:51 +02:00
Jacob Karlsson 995c70fe68 1.0.3 2024-06-01 18:22:47 +02:00
Jacob Karlsson 047c88fb86 Return null instead of throwing on property errs (#8)
Trying to fix https://codeberg.org/pzp/pzp-sync/issues/10

This doesn't solve the problem of missing rootMsg but maybe allows sync to fail more gracefully

Reviewed-on: https://codeberg.org/pzp/pzp-db/pulls/8
Co-authored-by: Jacob Karlsson <jacob.karlsson95@gmail.com>
Co-committed-by: Jacob Karlsson <jacob.karlsson95@gmail.com>
2024-06-01 16:21:00 +00:00
Jacob Karlsson eefe93820d 1.0.2 2024-05-17 17:39:03 +02:00
Jacob Karlsson 1d2470865b Improve cb types 2024-05-17 17:38:54 +02:00
Jacob Karlsson 8e6128f238 Rename to pzp (#4)
For https://codeberg.org/pzp/pzp-sdk/issues/1

Reviewed-on: https://codeberg.org/pzp/pzp-db/pulls/4
Co-authored-by: Jacob Karlsson <jacob.karlsson95@gmail.com>
Co-committed-by: Jacob Karlsson <jacob.karlsson95@gmail.com>
2024-04-27 21:47:50 +00:00
Jacob Karlsson f93082f38d Skip really long test in ci 2024-04-17 12:32:32 +02:00
Jacob Karlsson b4190b0ae2 Increase test timeout more 2024-04-14 23:34:19 +02:00
Jacob Karlsson 6acd57c707 Enable woodpecker CI (#3)
Fixes https://codeberg.org/pzp/pzp-db/issues/2

Reviewed-on: https://codeberg.org/pzp/pzp-db/pulls/3
Co-authored-by: Jacob Karlsson <jacob.karlsson95@gmail.com>
Co-committed-by: Jacob Karlsson <jacob.karlsson95@gmail.com>
2024-04-14 14:46:19 +00:00
Powersource f056429f31
Document the api (#31) 2024-04-13 14:33:36 +02:00
Powersource 15cfc7459d
Make all APIs async, internally wait for rescanning (#29) 2024-04-07 17:04:32 +02:00
Powersource e40c7cff09
Prune the accountTangle in getSigkeysInAccount (#23) 2024-03-25 16:46:28 +01:00
Powersource d332308104
Add db.account.del (#27) 2024-03-25 16:45:08 +01:00
Powersource 3c236581fa
Make getSigkeysInAccount consider "del" (#24) 2024-03-25 16:33:41 +01:00
Andre Staltz ba6f727417
moot can be added without sigkey validation 2024-03-12 12:59:40 +02:00
Andre Staltz 0c86b43d22
improve Tangle error messages 2024-03-06 11:13:04 +02:00
Andre Staltz cf1532965e
tiny refactor in Ghosts 2024-03-01 14:56:44 +02:00
Andre Staltz 6661b73fd4
add() can replace a dataless msg with a dataful 2024-02-28 15:32:36 +02:00
Andre Staltz cbeabab904
Tangle can be built from randomly ordered msgs 2024-02-28 13:53:31 +02:00
Andre Staltz c31580a961
add a few code comments 2024-02-27 18:03:35 +02:00
Andre Staltz 60afd4b64b
Re-scan log into memory after compacting 2024-02-27 16:52:01 +02:00
Andre Staltz 667b33779d
add() supports concurrency 2024-01-31 13:12:55 +02:00
Andre Staltz 73b9a80c73
package.json ppppp deps specify commit tag 2024-01-31 13:01:04 +02:00
Andre Staltz 68605ea387
getTangle() returns null if tangleID is unknown 2024-01-30 17:27:31 +02:00
Andre Staltz 4946afac12
fix typescript 2024-01-19 12:48:57 +02:00
Andre Staltz 7d5588ef4d
new API onRecordDeletedOrErased 2024-01-19 12:47:52 +02:00
Andre Staltz f8a2006eb1
improve types of dependencies 2024-01-19 12:24:38 +02:00
Andre Staltz 9e41400cdc
Log: fix concurrent overwrite() and compact() 2024-01-18 16:40:39 +02:00
Andre Staltz 844b808e78
new name for the log stats file 2024-01-18 16:27:17 +02:00
Andre Staltz 69a0a1f60c
improve debug logs in the Log module 2024-01-18 16:25:00 +02:00
Andre Staltz 17742f9ed2
avoid infinite loop in Tangle.precedes() 2024-01-17 15:47:58 +02:00
Andre Staltz 001fcaa1ab
refactor isMoot() 2024-01-05 14:18:19 +02:00
Andre Staltz cc1f83c064
use config.global.keypair 2023-12-29 12:14:59 +02:00
Andre Staltz 23076a58ca
update msg-v4 export 2023-12-25 12:27:42 +02:00
Andre Staltz 3a2bfe25d0
msg-v4 2023-12-25 12:24:35 +02:00
Andre Staltz fd2e50438a
fix microqueue tests 2023-12-21 11:29:32 +02:00
Andre Staltz f38a62e006
use the microqueue for onRecordAdded 2023-12-21 11:17:28 +02:00
Andre Staltz 42acbc310f
add() supports inferring tangleID 2023-12-15 15:49:54 +02:00
Andre Staltz fa15271fdf
loosen tangle.slice() arguments 2023-12-15 14:38:02 +02:00
Andre Staltz f38b293c76
more tests 2023-12-15 14:36:25 +02:00
Andre Staltz 99c1520415
new API tangle.slice() 2023-12-15 14:34:34 +02:00
Andre Staltz f523df3a0e
MsgV3.isFeedMsg() API 2023-12-14 15:58:49 +02:00
Andre Staltz e86da204cb
format test file a bit 2023-12-14 14:49:51 +02:00
Andre Staltz b76130aa74
new feed.getID() API 2023-12-11 15:29:31 +02:00
Andre Staltz f9a3875ffd
rename feed.getID() to feed.findMoot() 2023-12-11 15:25:57 +02:00
Andre Staltz 5b81c6ea82
move config.path to config.db.path 2023-11-24 15:44:25 +02:00
Andre Staltz dd492553be
log: improve error messages in compact() 2023-11-23 17:02:32 +02:00
Andre Staltz 29badc9fd8
log: dont compact on startup 2023-11-23 17:02:07 +02:00
Andre Staltz 1f88d67116
log: show compaction duration in debugging 2023-11-23 16:49:23 +02:00
Andre Staltz 2b1de9bce7
improve tests for del() and erase() 2023-11-23 16:09:06 +02:00
Andre Staltz cfebe1f46c
expose log methods: stats, compact 2023-11-23 15:49:54 +02:00
Andre Staltz ac8e730b64
fix some typescript 2023-11-23 15:43:41 +02:00
Andre Staltz 5e63142e50
log: fix how stats are counted 2023-11-23 15:38:05 +02:00
Andre Staltz a84dd297a5
log: implement compaction (sift-clone and rename) 2023-11-23 15:04:40 +02:00
Andre Staltz d21c7ed697
fix test for account powers validation 2023-11-23 12:55:17 +02:00
Andre Staltz f8db4fb224
fix account powers validation 2023-11-23 12:10:27 +02:00
Andre Staltz 3e4fe864f7
align impl and spec regarding account key purposes 2023-11-16 14:09:55 +02:00
Andre Staltz fc47a4006b
account msgs have "account__" domain prefix 2023-11-16 12:09:00 +02:00
Andre Staltz 1ff84756bc
flatten tagged union in msg AccountData 2023-11-16 11:29:55 +02:00
Andre Staltz 9223402335
refactor: simplify some code in MsgV3 2023-11-13 20:44:20 +02:00
Andre Staltz accc41a68c
refactor: remove unreachable code in MsgV3 2023-11-13 15:33:03 +02:00
Andre Staltz ae122c815e
support erase() persistence 2023-11-10 15:46:17 +02:00
Andre Staltz 9e7feb3d41
log: improve overwrite() with scheduled flushes 2023-11-10 14:50:22 +02:00
Andre Staltz 2f527613c2
refactor: cleanup dead code 2023-11-10 14:40:25 +02:00
Andre Staltz 153e75da8e
log: implement naive overwrite() 2023-11-10 14:39:15 +02:00
Andre Staltz 3636acaaa3
remove unused error msg 2023-11-10 14:22:53 +02:00
Andre Staltz f40ea71ff9
refactor/prettify tests 2023-11-10 11:06:19 +02:00
André Staltz 9356b9b3d9
Update protospec.md 2023-11-09 11:04:24 +02:00
Andre Staltz 53d8c44da8
CI: not Node.js 21 2023-11-07 11:22:25 +02:00
Andre Staltz 25c073f391
CI: use new versions of Node.js too 2023-11-07 11:17:04 +02:00
Andre Staltz 4157b4989c
fix tests in CI 2023-11-07 11:14:41 +02:00
Andre Staltz 4302094926
log: remove some unnecessary methods 2023-11-07 11:11:19 +02:00
Andre Staltz b6d17e947f
log reading methods match log codec type 2023-11-07 10:48:20 +02:00
Andre Staltz db915d0287
replace async-append-only-log with our own log 2023-11-07 09:53:56 +02:00
Andre Staltz c1f527b5d5
refactor: cosmetics 2023-10-26 13:03:52 +03:00
Andre Staltz dea38e4c1a
fix ghosts.add() against concurrent writes 2023-10-26 12:06:06 +03:00
Andre Staltz 3fccd4d661
rename ghosts.add() opts.max to opts.span 2023-10-26 10:42:08 +03:00
Andre Staltz 4fff37ad02
refuse re-adding a dataful ghost msg 2023-10-25 19:09:19 +03:00
Andre Staltz 8c3800264a
fixup for: change ghosts.add API arguments 2023-10-25 18:47:57 +03:00
Andre Staltz 31bf38e2d2
change ghosts.add API arguments 2023-10-25 18:46:34 +03:00
Andre Staltz 778dbda588
refactor to move files around 2023-10-25 18:39:57 +03:00
Andre Staltz 21c1adbd2a
make ghost read APIs synchronous 2023-10-25 15:22:07 +03:00
Andre Staltz b87ca604eb
fix minor Tangle class issues 2023-10-19 17:06:51 +03:00
Andre Staltz 75b36e9730
add API tangle.root 2023-10-19 15:10:24 +03:00
Andre Staltz 05e16a7037
reading empty ghosts should not raise error 2023-10-19 13:46:16 +03:00
Andre Staltz 748125b6e6
add tangle.id field 2023-10-19 13:37:57 +03:00
Andre Staltz d74695b7e7
add ghosts API 2023-10-18 17:04:50 +03:00
Andre Staltz 6996fb2d20
use polyfill friendly "path" instead of "node:path" 2023-10-18 14:45:06 +03:00
Andre Staltz 39d48ac416
add semantic type MsgID in lib/index.js 2023-10-18 14:44:15 +03:00
Andre Staltz 31ec544522
fix tangle getShortestPath against cycles 2023-09-26 15:31:19 +03:00
Andre Staltz fec2b46a3e
improve types for void callbacks 2023-09-26 15:30:48 +03:00
Andre Staltz 0d9ce50cfe
change getDeletablesAndErasables return type 2023-09-20 14:47:23 +03:00
Andre Staltz 385b34dbd6
tangle.getDeletablesAndErasables() accepts many args 2023-09-14 16:54:43 +03:00
Andre Staltz a5c4847244
small rename 2023-09-14 13:55:08 +03:00
Andre Staltz 530797317d
make Msg type generic in msg.data 2023-09-14 12:54:01 +03:00
Andre Staltz 5a405be367
add API logStats(cb) 2023-09-13 14:27:53 +03:00
Andre Staltz a6749cafa1
improve feed.publish input assertions 2023-09-07 17:28:43 +03:00
Andre Staltz 25eb244608
allow adding erased msgs 2023-09-07 16:38:58 +03:00
Andre Staltz b92d25c6a2
validation on add() supports encrypted inner msgs 2023-08-31 13:33:13 +03:00
Andre Staltz 222f54ea52
minor refactoring and rename message=>msg 2023-08-29 10:22:52 +03:00
Andre Staltz 3a9df124f6
refactor account msg validation 2023-08-28 17:04:23 +03:00
Andre Staltz f1a13eee80
refactor validation 2023-08-28 16:57:25 +03:00
Andre Staltz 0beb2477a2
more Tangle getter renaming 2023-08-28 15:56:21 +03:00
Andre Staltz 0b7f49846e
rename some Tangle getters 2023-08-28 15:47:24 +03:00
Andre Staltz c76584953b
some breaking change renaming
msgHash => msgID
Feed root => moot
rec.hash => rec.id
2023-08-28 15:14:20 +03:00
Andre Staltz c06828cd99
update protospec: key types 2023-08-24 14:00:24 +03:00
Andre Staltz af4325ef94
rename identity to account 2023-08-08 16:33:36 +03:00
Andre Staltz 557ea2252c
implement powers in identity.add() 2023-08-08 15:31:23 +03:00
Andre Staltz e6aa33d3f0
add more JSDocs 2023-08-08 14:04:27 +03:00
Andre Staltz 5f76000531
fix which domain is used in identity.add() 2023-07-20 20:23:27 +03:00
Andre Staltz 54f00d1944
new identity.has() API 2023-07-18 23:47:13 +03:00
Andre Staltz 7dea4b2328
use github ppppp-keypair, not local fs 2023-07-14 16:16:51 +03:00
Andre Staltz 79bc497911
detailed tag union for adding identity keys 2023-07-14 16:14:00 +03:00
Andre Staltz 07c2168f97
identity.add() can implicitly also create the consent 2023-07-11 10:49:33 +03:00
Andre Staltz 2347592aac
update protospec to mention consent sigs 2023-07-10 15:55:35 +03:00
Andre Staltz 0899ac5818
add identity.consent() API 2023-07-10 15:52:18 +03:00
Andre Staltz a3fcb641eb
tests for identity.find() etc 2023-07-10 15:41:28 +03:00
Andre Staltz a442a26c2a
tiny refactor around msgv3.createIdentity() 2023-06-25 20:50:02 +03:00
Andre Staltz df98d499f1
identity tangle always has metadata.identity=self 2023-06-25 20:48:20 +03:00
Andre Staltz 674e2ba66c
use TypeScript in JSDoc 2023-06-25 18:48:24 +03:00
Andre Staltz aab707f5da
export msg-v3 file 2023-06-25 16:52:32 +03:00
Andre Staltz 1ee69de573
replace ssb-caps with ppppp-caps 2023-06-25 11:38:19 +03:00
Andre Staltz d97d29fca0
rename type=>domain, group=>identity 2023-06-25 11:07:22 +03:00
Andre Staltz 08d3450f20
replace buffer with b4a 2023-06-15 14:55:23 +03:00
Andre Staltz aa5d49d512
replace tape with node:test 2023-06-14 11:56:00 +03:00
Andre Staltz fa16916e82
use ppppp-keypair instead of ssb-keys 2023-06-14 10:48:54 +03:00
Andre Staltz 78a6f2216c
fix a test that was still using feed-v1 2023-06-14 10:27:42 +03:00
Andre Staltz 302bfa81f8
also remove feed-v1 tests 2023-06-13 16:00:39 +03:00
Andre Staltz f66807a774
remove feed-v1 (it can be found in git tag "rev1") 2023-06-13 15:57:47 +03:00
Andre Staltz e52368b92f
fix package.json main field 2023-06-09 13:50:02 +03:00
André Staltz 6e7fca8e6a
Update protospec.md 2023-05-29 16:00:25 +03:00
Andre Staltz 5c24ffc498
add MsgV2 to protospec.md 2023-05-26 16:58:35 +03:00
Andre Staltz 5ece075c79
test that you can publish with a key added to a group 2023-05-26 14:59:15 +03:00
Andre Staltz a3f37381cd
test group.create() and group.add() 2023-05-26 14:37:15 +03:00
Andre Staltz ce5acb849e
properly implement group.add() 2023-05-26 14:37:04 +03:00
Andre Staltz d91c2a0180
test feed.getId() 2023-05-26 14:24:52 +03:00
Andre Staltz 90395b8bd3
rename feed.getRoot() to feed.getId() 2023-05-26 14:24:39 +03:00
Andre Staltz 0772aa4883
remove validateTangle 2023-05-26 14:20:31 +03:00
Andre Staltz e0a136065e
allow passing test nonce to group.create() 2023-05-26 14:02:13 +03:00
Andre Staltz a4e94f1702
add msg-v2 exports in package.json 2023-05-25 17:50:29 +03:00
Andre Staltz 393c0e4827
validate group tangle 2023-05-25 17:10:42 +03:00
Andre Staltz e957a4a578
more msg-v2 tests 2023-05-25 16:11:20 +03:00
Andre Staltz 248e14da42
small facepalm in msg-v2 validation 2023-05-25 16:06:07 +03:00
Andre Staltz 8d0d3cf175
msg-v2 with group tangles 2023-05-25 15:52:11 +03:00
72 changed files with 8758 additions and 2808 deletions

@@ -1,25 +0,0 @@
name: CI
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
test:
runs-on: ubuntu-latest
timeout-minutes: 10
strategy:
matrix:
node-version: [16.x, 18.x]
steps:
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
- run: npm install
- run: npm test

.gitignore vendored (1 line changed)

@@ -3,6 +3,7 @@ node_modules
pnpm-lock.yaml
package-lock.json
coverage
lib/**/*.d.ts
*~
# For misc scripts and experiments:

.woodpecker.yaml Normal file (13 lines changed)

@@ -0,0 +1,13 @@
matrix:
NODE_VERSION:
- 18
- 20
steps:
test:
when:
event: [push]
image: node:${NODE_VERSION}
commands:
- npm install
- npm test

README.md (201 lines changed)

@@ -1,14 +1,205 @@
**Work in progress**
# pzp-db
The message database for PZP.
## Installation
```
npm install pzp-db
```
## Usage
It's a secret-stack plugin much like ssb-db2. Other than that, you can also use
the feed format `const FeedV1 = require('pzp-db/feed-v1')`.
You can use it like
```js
const p = require('node:util').promisify
const keypair = Keypair.generate('ed25519', 'alice')
const DIR = path.join(os.tmpdir(), 'pzp-db-temp')

const pzp = require('secret-stack/bare')()
  .use(require('secret-stack/plugins/net'))
  .use(require('secret-handshake-ext/secret-stack'))
  .use(require('pzp-db'))
  .use(require('ssb-box'))
  .call(null, {
    shse: { caps: require('pzp-caps') },
    global: {
      keypair,
      path: DIR,
    },
  })

await pzp.db.loaded()

const account = await p(pzp.db.account.create)({
  keypair,
  subdomain: 'person',
})

const record = await p(pzp.db.feed.publish)({
  account,
  domain: 'post',
  data: { text: 'I am 1st post' },
})

console.log("account:", account, "record:", JSON.stringify(record, null, 2))
//account: 8VLSqiWCX26w1173212RBRvY8N7MEbY3ar8fv22cGx6b record: {
// "id": "H8dQH6LzeW2He7oRVXKP6u6WbC1GQ8EABh3PgS587L3w",
// "msg": {
// "data": {
// "text": "I am 1st post"
// },
// "metadata": {
// "dataHash": "39FJFLNXj7L83nFJbrrbADdKCeFe2vP2ikuNZXVKYSXP",
// "dataSize": 24,
// "account": "8VLSqiWCX26w1173212RBRvY8N7MEbY3ar8fv22cGx6b",
// "accountTips": [
// "8VLSqiWCX26w1173212RBRvY8N7MEbY3ar8fv22cGx6b"
// ],
// "tangles": {
// "9HdQRpQNHgxiuxRy8eSEvEDG3nAL4EAYYkYHiHbU7Xqo": {
// "depth": 1,
// "prev": [
// "9HdQRpQNHgxiuxRy8eSEvEDG3nAL4EAYYkYHiHbU7Xqo"
// ]
// }
// },
// "domain": "post",
// "v": 4
// },
// "sigkey": "4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW",
// "sig": "WNY4WZiT3SLQKFn4J6ESLn8WqPfLRh5fPdTiZTkvDNf5u79wFmXv367UV93XjyzACi6C3fgwZkstq5JczCk3YPH"
// },
// "received": 1712503926457
//}
```
## API
NOTE: All functions that take a callback (cb) return a promise instead if you omit the callback.
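For example, both styles of calling `pzp.db.get` (a minimal sketch; `msgID` is a placeholder for a known message ID):
```js
// Callback style:
pzp.db.get(msgID, (err, msg) => {
  if (err) console.error(err)
  else console.log(msg)
})

// Promise style, by omitting the callback:
const msg = await pzp.db.get(msgID)
```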
### `pzp.db.installEncryptionFormat(encryptionFormat)`
If `encryptionFormat` conforms to the [ssb-encryption-format](https://github.com/ssbc/ssb-encryption-format) spec, then this method will install the `encryptionFormat` in this database instance, meaning that you can now encrypt and decrypt messages using that encryption format.
### `pzp.db.loaded(cb)`
Calls back when the database is ready to be used.
### `pzp.db.add(msg: Msg, tangleID: MsgID | null, cb: CB<RecPresent>)`
Adds a message to the database. Usually performed automatically when you do other things like publishing messages or syncing from other peers.
### `pzp.db.account.find({ keypair?: KeypairPublicSlice, subdomain: string }, cb: CB<string>)`
Find the account that contains this `keypair` (or the implicit `config.global.keypair`) under the given `subdomain` (will be converted to an actual msg domain).
### `pzp.db.account.create({ keypair?: Keypair, subdomain: string }, cb: CB<string>)`
Create an account (root msg) for the given `keypair` (or the implicit `config.global.keypair`) under the given `subdomain` (will be converted to an actual msg domain).
### `pzp.db.account.findOrCreate({ keypair?: Keypair, subdomain: string }, cb: CB<string>)`
Find or create an account (root msg) for the given `keypair` (or the implicit `config.global.keypair`) under the given `subdomain` (will be converted to an actual msg domain).
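For example, a typical startup sketch (assuming the loaded `pzp` peer from the Usage section above):
```js
// Idempotent: finds the existing account for config.global.keypair
// under the 'person' subdomain, or creates one if none exists yet.
const accountID = await pzp.db.account.findOrCreate({ subdomain: 'person' })
```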
### `pzp.db.account.add({ account: string, keypair: Keypair | KeypairPublicSlice, powers?: Array<AccountPower>, consent?: string }, cb: CB<RecPresent>)`
Add the given `keypair` to the given `account`, authorized by the given `consent` (or implicitly created on the fly if the `keypair` contains the private key) with the specified `powers` (defaulting to no powers).
### `pzp.db.account.del({ account: string, keypair: KeypairPublicSlice }, cb: CB<RecPresent>)`
Remove the given `keypair` from the given `account`.
### `pzp.db.account.consent({ keypair?: KeypairPrivateSlice, account: string }) => string`
Create a consent signature for the given `keypair` (or the implicit `config.global.keypair`) to be added to the given `account`.
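As an illustration of the consent flow between two devices (a sketch; `newKeypair` and `accountID` are placeholders, and the exact shape of the public-only keypair slice is an assumption here):
```js
// On the new device: sign a consent string with the new keypair.
// (account.consent is synchronous and returns the signature string.)
const consent = pzp.db.account.consent({ keypair: newKeypair, account: accountID })

// On a device already in the account: add the new device's public key,
// passing along the consent it produced. `powers` defaults to none.
await pzp.db.account.add({
  account: accountID,
  keypair: { curve: 'ed25519', public: newKeypair.public },
  consent,
})
```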
### `pzp.db.account.has({ keypair?: KeypairPublicSlice, account: string }, cb: CB<boolean>)`
Does this `account` have this `keypair` (or the implicit `config.global.keypair`)?
### `pzp.db.feed.publish({ keypair?: Keypair, encryptionFormat?: string, data: object, domain: string, account: string, tangles?: Array<MsgID> }, cb: CB<RecPresent>)`
Publishes a message to the feed of the given `domain`.
### `pzp.db.feed.getID(accountId: string, domain: string) => string`
Gets the moot ID (the ID of an account's domain's root message) for a given account and domain. The moot is deterministic, so you can calculate its ID even if you haven't received the message itself.
### `pzp.db.feed.findMoot(accountId: string, domain: string, cb: CB<RecPresent | null>)`
Gets the moot for the specified account and domain from the database. A moot is the root message for an account's domain.
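For example, comparing the two (a sketch; `accountID` is a placeholder):
```js
// Compute the moot ID purely from account + domain, no lookup needed:
const mootID = pzp.db.feed.getID(accountID, 'post')

// Fetch the actual moot record from the local database (or null if absent):
const moot = await pzp.db.feed.findMoot(accountID, 'post')
if (moot) console.log(moot.id === mootID) // expected: true
```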
### `pzp.db.getRecord(msgID: MsgID, cb: CB<RecPresent | null>)`
Gets a message's record using its message ID, if you have it in your database. The record has the shape `{ id: string, msg: Msg, received: number }`.
### `pzp.db.get(msgID: MsgID, cb: CB<Msg | null>)`
Gets a message using its message ID, if you have it in your database.
### `pzp.db.del(msgID: MsgID, cb: CB<void>)`
Deletes a specific message from your database.
### `pzp.db.erase(msgID: MsgID, cb: CB<void>)`
Erases a specific message in your database. Erasing, as opposed to deleting, only removes a message's `data`. Metadata is kept and the message integrity can still be verified.
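To illustrate the difference (a sketch; assumes `rec` is a record you previously published, and that `get` returns nothing once a message is deleted):
```js
await pzp.db.erase(rec.id)
const erased = await pzp.db.get(rec.id)
console.log(erased.data) // null; metadata and sig remain verifiable

await pzp.db.del(rec.id)
console.log(await pzp.db.get(rec.id)) // nothing left to return
```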
### `pzp.db.ghosts.add({ tangleID: MsgID, msgID: MsgID, span: number }, cb: CB<void>)`
Adds a [ghost][ghost] to the database.
### `pzp.db.ghosts.get(tangleID: MsgID) => Array<string>`
Gets a [ghost][ghost] from the database.
### `pzp.db.ghosts.getMinDepth(tangleID: MsgID) => number`
Gets the lowest depth among the [ghosts][ghost] in the given tangle.
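For example (a sketch; `tangleID` and `msgID` are placeholders):
```js
// Remember a msg as a ghost; `span` bounds how many depths' worth of
// ghosts are kept per tangle before deeper-but-older ones are GCed.
await pzp.db.ghosts.add({ tangleID, msgID, span: 32 })

// Reads are synchronous:
const ghostIDs = pzp.db.ghosts.get(tangleID) // Array<string>
const minDepth = pzp.db.ghosts.getMinDepth(tangleID) // number
```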
### `pzp.db.onRecordAdded`
An [obz][obz] observable that triggers when a record is added to the database.
### `pzp.db.onRecordDeletedOrErased`
An [obz][obz] observable that triggers when a record is either deleted or erased. Erasing means that only the `data` field of the message has been cleared.
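For example, subscribing to both (a sketch; the exact payload passed to each listener is an assumption here):
```js
const stopAdded = pzp.db.onRecordAdded((rec) => {
  console.log('added:', rec.id)
})
const stopDeleted = pzp.db.onRecordDeletedOrErased((msgID) => {
  console.log('deleted or erased:', msgID)
})

// obz listeners return an unsubscribe function:
stopAdded()
stopDeleted()
```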
### `pzp.db.getTangle(tangleID: MsgID, cb: CB<DBTangle | null>)`
Tries to get a `DBTangle` object representing an entire tangle in the database.
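For example (a sketch; `tangleID` and `someMsgID` are placeholders):
```js
const tangle = await pzp.db.getTangle(tangleID)
if (tangle) {
  // Whole tangle in topological order (omitting the callback returns a promise):
  const msgs = await tangle.slice()

  // Which preceding msgs could be deleted or erased without breaking validation:
  const result = tangle.getDeletablesAndErasables(someMsgID)
  if (result) console.log(result.deletables, result.erasables)
}
```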
### `pzp.db.msgs() => AsyncGenerator<Msg>`
Returns an async generator letting you iterate over all messages in the database.
### `pzp.db.records() => AsyncGenerator<Rec>`
Returns an async generator letting you iterate over all records in the database. Present records have the shape `{ id: string, msg: Msg, received: number }`, but deleted records (with those fields absent) may also be yielded.
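For example, iterating over everything in the database:
```js
for await (const msg of pzp.db.msgs()) {
  if (msg.data) console.log(msg.metadata.domain, msg.data)
}

for await (const rec of pzp.db.records()) {
  if (!rec.msg) continue // deleted records have no msg
  console.log(rec.id, rec.received)
}
```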
### `pzp.db.log.stats(cb: CB<{ totalBytes: number; deletedBytes: number }>)`
Returns some size stats on the log file, where messages are stored.
### `pzp.db.log.compact(cb: CB<void>)`
Makes the log file (the message store) take up less space by compacting it into the space freed by messages that have been deleted.
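For example, a sketch of a simple maintenance routine (the 50% threshold is an arbitrary choice for illustration):
```js
const { totalBytes, deletedBytes } = await pzp.db.log.stats()

// e.g. compact once more than half of the log is deleted bytes:
if (deletedBytes > totalBytes / 2) {
  await pzp.db.log.compact()
}
```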
## License
Copyright © 2023-2024 Andre 'Staltz' Medeiros <contact@staltz.com> and contributors. Licensed under the MIT license.
[ghost]: https://www.manyver.se/blog/2023-11-05
[obz]: https://www.npmjs.com/package/obz

declarations/atomic-file-rw.d.ts vendored Normal file (16 lines changed)

@@ -0,0 +1,16 @@
type CB<T> = (...args: [NodeJS.ErrnoException] | [null, T]) => void
declare module 'atomic-file-rw' {
export function readFile(
path: string,
encodingOrOpts: string | { encoding: string },
cb: CB<string>
): void
export function writeFile(
path: string,
data: string,
encodingOrOpts: string | { encoding: string },
cb: CB<string>
): void
export function deleteFile(path: string, cb: CB<null>): void
}

declarations/multicb.d.ts vendored Normal file (10 lines changed)

@@ -0,0 +1,10 @@
declare module 'multicb' {
type Opts = {
pluck?: number
spread?: boolean
}
type CB<T> = (...args: [Error] | [null, T] | []) => void
type Done<T> = ((cb: CB<T>) => void) & (() => CB<T>)
function multicb<T>(opts?: Opts): Done<T>
export = multicb
}

declarations/mutexify.d.ts vendored Normal file (12 lines changed)

@@ -0,0 +1,12 @@
declare module 'mutexify' {
type CB<T> = T extends void
? (...args: [NodeJS.ErrnoException] | []) => void
: (...args: [NodeJS.ErrnoException] | [null, T]) => void
export type Mutexify<T> = (
fn: (
unlock: (cb: CB<T>, ...args: [Error] | [null, T]) => void
) => void
) => void
function mutexify<T>(): Mutexify<T>
export = mutexify
}

declarations/obz.d.ts vendored Normal file (10 lines changed)

@@ -0,0 +1,10 @@
declare module 'obz' {
type Remove = () => void
export interface Obz<X> {
(listener: (value: X) => void): Remove
set(value: X): this
value: X
}
function createObz(): Obz
export = createObz
}

lib/db-tangle.js Normal file (158 lines changed)

@@ -0,0 +1,158 @@
const pull = require('pull-stream')
const p = require('node:util').promisify
const MsgV4 = require('./msg-v4')
/**
* @typedef {string} MsgID
* @typedef {import('./msg-v4').Msg} Msg
*/
/**
* @typedef {{
* id?: never;
* msg?: never;
* received?: never;
* }} RecDeleted
*
* @typedef {{
* id: MsgID;
* msg: Msg;
* received: number;
* }} RecPresent
*
* @typedef {RecPresent | RecDeleted} Rec
*/
/**
* @template T
* @typedef {[T] extends [void] ?
* (...args: [Error] | []) => void :
* (...args: [Error] | [null, T]) => void
* } CB
*/
class DBTangle extends MsgV4.Tangle {
/** @type {(msgID: MsgID, cb: CB<Msg>) => void} */
#getMsg
/**
* @param {MsgID} rootID
* @param {(msgID: MsgID, cb: CB<Msg>) => void} getMsg
*/
constructor(rootID, getMsg) {
super(rootID)
this.#getMsg = getMsg
}
/**
* @param {MsgID} rootID
* @param {AsyncIterable<Rec>} recordsIter
* @param {(msgID: MsgID, cb: any) => void} getMsg
* @return {Promise<DBTangle>}
*/
static async init(rootID, recordsIter, getMsg) {
const dbtangle = new DBTangle(rootID, getMsg)
for await (const rec of recordsIter) {
if (!rec.msg) continue
dbtangle.add(rec.id, rec.msg)
}
return dbtangle
}
/**
* Given a set of msgs (`msgIDs`) in this tangle, find all "deletable" and
* "erasable" msgs that precede that set.
*
* *Deletables* are msgs that precede `msgsIDs` but are not important in any
* validation path toward the root, and thus can be deleted.
*
* *Erasables* are msgs that precede `msgsIDs` and can be erased without
* losing a validation path toward the root.
* @param {Array<MsgID>} msgIDs
* @returns {{ deletables: Set<MsgID>, erasables: Set<MsgID> } | null}
*/
getDeletablesAndErasables(...msgIDs) {
// Determine erasables
const erasables = new Set()
const minimum = this.getMinimumAmong(msgIDs)
for (const msgID of minimum) {
const trail = this.shortestPathToRoot(msgID)
if (!trail) return null
for (const id of trail) {
erasables.add(id)
}
}
// Determine deletables
const deletables = new Set()
const sorted = this.topoSort()
for (const msgID of sorted) {
if (erasables.has(msgID)) continue
if (minimum.some((min) => this.precedes(msgID, min))) {
deletables.add(msgID)
}
}
return { deletables, erasables }
}
/**
* @param {Array<string>=} minSet
* @param {Array<string>=} maxSet
* @param {CB<Array<Msg>>=} cb
* @return {Promise<Array<Msg>>|void}
*/
slice(minSet = [], maxSet = [], cb) {
// @ts-ignore
if (cb === undefined) return p(this.slice).bind(this)(minSet, maxSet)
const minSetGood = minSet.filter((msgID) => this.has(msgID))
const maxSetGood = maxSet.filter((msgID) => this.has(msgID))
const minSetTight = this.getMinimumAmong(minSetGood)
const trail = new Set()
for (const msgID of minSetTight) {
const path = this.shortestPathToRoot(msgID)
if (!path) return cb(Error("Couldn't get shortest path to root when slicing dbtangle"))
for (const msgID of path) {
trail.add(msgID)
}
}
const msgs = /**@type {Array<Msg>}*/ ([])
pull(
pull.values(this.topoSort()),
pull.asyncMap((msgID, cb) => {
this.#getMsg(msgID, (err, msg) => {
if (err) return cb(err)
cb(null, { id: msgID, msg })
})
}),
pull.drain(
(rec) => {
if (trail.has(rec.id)) {
if (rec.msg) msgs.push({ ...rec.msg, data: null })
}
const isMin = minSetGood.includes(rec.id)
const isMax = maxSetGood.includes(rec.id)
const isBeforeMin = minSetGood.some((min) =>
this.precedes(rec.id, min)
)
const isAfterMax = maxSetGood.some((max) =>
this.precedes(max, rec.id)
)
if (!isMin && isBeforeMin) return
if (!isMax && isAfterMax) return
if (rec.msg) msgs.push(rec.msg)
},
(err) => {
if (err) return cb(Error('DBTangle.slice() failed', { cause: err }))
return cb(null, msgs)
}
)
)
}
}
module.exports = DBTangle


@@ -1,67 +1,107 @@
const FeedV1 = require('./feed-v1')
const base58 = require('bs58')
const b4a = require('b4a')
const MsgV4 = require('./msg-v4')
/**
* @typedef {import('./index').Msg} Msg
* @typedef {import('./index').RecPresent} RecPresent
* @typedef {import('./index').Rec} Rec
* @typedef {import('./index').Misc} Misc
* @typedef {import('pzp-keypair').Keypair} Keypair
*
* @typedef {Buffer | Uint8Array} B4A
*
* @typedef {{
* name: string;
* setup?: (config: any, cb: any) => void;
* onReady?: (cb: any) => void;
* encrypt: (plaintext: B4A, opts: any) => B4A;
* decrypt: (ciphertext: B4A, opts: any) => B4A | null;
* }} EncryptionFormat
*/
/**
* @param {string} str
*/
function ciphertextStrToBuffer(str) {
const dot = str.indexOf('.')
return Buffer.from(str.slice(0, dot), 'base64')
return b4a.from(str.slice(0, dot), 'base64')
}
/**
* @param {Rec} rec
* TODO: eventually get rid of this
* @param {Keypair} keypair
*/
function keypairToSSBKeys(keypair) {
const _public = b4a.from(base58.decode(keypair.public)).toString('base64')
const _private = b4a.from(base58.decode(keypair.private)).toString('base64')
return {
id: `@${_public}.ed25519`,
curve: keypair.curve,
public: _public,
private: _private,
}
}
const decryptCache = new WeakMap()
/**
* @template {{msg: Msg}} T
* @param {T} rec
* @param {any} peer
* @param {any} config
* @returns {Rec}
* @returns {T}
*/
function decrypt(rec, peer, config) {
if (decryptCache.has(rec)) return decryptCache.get(rec)
const msgEncrypted = rec.msg
const { content } = msgEncrypted
if (typeof content !== 'string') return rec
const { data } = msgEncrypted
if (typeof data !== 'string') return rec
const encryptionFormat = peer.db.findEncryptionFormatFor(content)
const encryptionFormat = peer.db.findEncryptionFormatFor(data)
if (!encryptionFormat) return rec
// Decrypt
const ciphertextBuf = ciphertextStrToBuffer(content)
const opts = { keys: config.keys }
const ciphertextBuf = ciphertextStrToBuffer(data)
const opts = { keys: keypairToSSBKeys(config.global.keypair) }
const plaintextBuf = encryptionFormat.decrypt(ciphertextBuf, opts)
if (!plaintextBuf) return rec
// Reconstruct KVT in JS encoding
const msgDecrypted = FeedV1.fromPlaintextBuffer(plaintextBuf, msgEncrypted)
const msgDecrypted = MsgV4.fromPlaintextBuffer(plaintextBuf, msgEncrypted)
return {
hash: rec.hash,
const recDecrypted = {
...rec,
msg: msgDecrypted,
received: rec.received,
misc: {
...rec.misc,
// ...rec.misc,
private: true,
originalContent: content,
originalData: data,
encryptionFormat: encryptionFormat.name,
},
}
decryptCache.set(rec, recDecrypted)
return recDecrypted
}
function reEncrypt(rec) {
return {
hash: rec.hash,
msg: { ...rec.msg, content: rec.misc.originalContent },
received: rec.received,
...(rec.misc.size
? {
misc: {
offset: rec.misc.offset,
size: rec.misc.size,
},
}
: null),
}
}
/**
* @param {RecPresent} rec
* @returns {RecPresent}
*/
// function reEncrypt(rec) {
// return {
// id: rec.id,
// msg: { ...rec.msg, data: rec.misc.originalData },
// received: rec.received,
// misc: {
// seq: rec.misc.seq,
// offset: rec.misc.offset,
// size: rec.misc.size,
// },
// }
// }
module.exports = {
decrypt,
reEncrypt,
// reEncrypt,
}


@@ -1,53 +0,0 @@
const blake3 = require('blake3')
const base58 = require('bs58')
const stringify = require('json-canon')
/**
* @typedef {import('./index').Msg} Msg
*/
/**
* @param {Msg} msg
* @returns {Buffer}
*/
function getMsgHashBuf(msg) {
const metadataBuf = Buffer.from(stringify(msg.metadata), 'utf8')
return blake3.hash(metadataBuf).subarray(0, 16)
}
/**
* @param {Msg | string} x
* @returns {string}
*/
function getMsgHash(x) {
if (typeof x === 'string') {
if (x.startsWith('ppppp:message/v1/')) {
const msgUri = x
const parts = msgUri.split('/')
return parts[parts.length - 1]
} else {
const msgHash = x
return msgHash
}
} else {
const msg = x
const msgHashBuf = getMsgHashBuf(msg)
return base58.encode(msgHashBuf)
}
}
/**
* @param {Msg} msg
* @returns {string}
*/
function getMsgId(msg) {
const { who, type } = msg.metadata
const msgHash = getMsgHash(msg)
if (type) {
return `ppppp:message/v1/${who}/${type}/${msgHash}`
} else {
return `ppppp:message/v1/${who}/${msgHash}`
}
}
module.exports = { getMsgId, getMsgHash }


@@ -1,212 +0,0 @@
const stringify = require('json-canon')
const ed25519 = require('ssb-keys/sodium')
const base58 = require('bs58')
const union = require('set.prototype.union')
const { stripAuthor } = require('./strip')
const { getMsgId, getMsgHash } = require('./get-msg-id')
const representContent = require('./represent-content')
const {
validateType,
validateContent,
validate,
validateBatch,
validateMsgHash,
} = require('./validation')
const Tangle = require('./tangle')
function isEmptyObject(obj) {
for (const _key in obj) {
return false
}
return true
}
/**
* @typedef {Iterator<Msg> & {values: () => Iterator<Msg>}} MsgIter
*/
/**
* @typedef {Object} TangleMetadata
* @property {number} depth
* @property {Array<string>} prev
*/
/**
* @typedef {Object} Msg
* @property {*} content
* @property {Object} metadata
* @property {string} metadata.hash
* @property {number} metadata.size
* @property {Record<string, TangleMetadata>} metadata.tangles
* @property {string} metadata.type
* @property {1} metadata.v
* @property {string} metadata.who
* @property {string} sig
*/
/**
* @typedef {Object} Keys
* @property {string} keys.id
* @property {string} keys.private
*/
/**
* @typedef {Object} CreateOpts
* @property {*} content
* @property {string} type
* @property {Keys} keys
* @property {Record<string, Tangle>} tangles
*/
/**
* @typedef {Object} CreateRootOpts
* @property {string} type
* @property {Keys} keys
* @property {string} keys.id
* @property {string} keys.private
*/
function isFeedRoot(msg, authorId, findType) {
const findWho = stripAuthor(authorId)
const { who, type, tangles } = msg.metadata
return who === findWho && type === findType && isEmptyObject(tangles)
}
function getFeedRootHash(authorId, type) {
const who = stripAuthor(authorId)
const msg = {
content: null,
metadata: {
hash: null,
size: 0,
tangles: {},
type,
v: 1,
who,
},
sig: '',
}
return getMsgHash(msg)
}
function toPlaintextBuffer(opts) {
return Buffer.from(stringify(opts.content), 'utf8')
}
/**
* @param {CreateOpts} opts
* @returns {Msg}
*/
function create(opts) {
let err
if ((err = validateType(opts.type))) throw err
if (!opts.tangles) throw new Error('opts.tangles is required')
const [hash, size] = representContent(opts.content)
const tangles = {}
if (opts.tangles) {
for (const rootId in opts.tangles) {
if ((err = validateMsgHash(rootId))) throw err
const tangle = opts.tangles[rootId]
const depth = tangle.getMaxDepth() + 1
const tips = tangle.getTips()
const lipmaaSet = tangle.getLipmaaSet(depth)
const prev = ([...union(lipmaaSet, tips)]).sort()
tangles[rootId] = { depth, prev }
}
} else {
// prettier-ignore
throw new Error(`cannot create msg without tangles, that's the case for createRoot()`)
}
const msg = {
content: opts.content,
metadata: {
hash,
size,
tangles,
type: opts.type,
v: 1,
who: stripAuthor(opts.keys.id),
},
sig: '',
}
if ((err = validateContent(msg))) throw err
const privateKey = Buffer.from(opts.keys.private, 'base64')
// TODO: add a label prefix to the metadata before signing
const metadataBuf = Buffer.from(stringify(msg.metadata), 'utf8')
// TODO: when signing, what's the point of a customizable hmac?
const sigBuf = ed25519.sign(privateKey, metadataBuf)
msg.sig = base58.encode(sigBuf)
return msg
}
/**
* @param {Keys} keys
* @param {string} type
* @returns {Msg}
*/
function createRoot(keys, type) {
let err
if ((err = validateType(type))) throw err
const msg = {
content: null,
metadata: {
hash: null,
size: 0,
tangles: {},
type,
v: 1,
who: stripAuthor(keys.id),
},
sig: '',
}
const privateKey = Buffer.from(keys.private, 'base64')
// TODO: add a label prefix to the metadata before signing
const metadataBuf = Buffer.from(stringify(msg.metadata), 'utf8')
// TODO: when signing, what's the point of a customizable hmac?
const sigBuf = ed25519.sign(privateKey, metadataBuf)
msg.sig = base58.encode(sigBuf)
return msg
}
/**
* @param {Msg} msg
* @returns {Msg}
*/
function erase(msg) {
return { ...msg, content: null }
}
/**
* @param {Buffer} plaintextBuf
* @param {Msg} msg
* @returns {Msg}
*/
function fromPlaintextBuffer(plaintextBuf, msg) {
return { ...msg, content: JSON.parse(plaintextBuf.toString('utf-8')) }
}
module.exports = {
getMsgHash,
getMsgId,
isFeedRoot,
getFeedRootHash,
create,
createRoot,
erase,
stripAuthor,
toPlaintextBuffer,
fromPlaintextBuffer,
Tangle,
validate,
validateBatch,
}


@@ -1,16 +0,0 @@
const blake3 = require('blake3')
const base58 = require('bs58')
const stringify = require('json-canon')
/**
* @param {any} content
* @returns {[string, number]}
*/
function representContent(content) {
const contentBuf = Buffer.from(stringify(content), 'utf8')
const hash = base58.encode(blake3.hash(contentBuf).subarray(0, 16))
const size = contentBuf.length
return [hash, size]
}
module.exports = representContent


@@ -1,29 +0,0 @@
const { getMsgHash } = require('./get-msg-id')
function stripMsgKey(msgKey) {
if (typeof msgKey === 'object') {
if (msgKey.key) return stripMsgKey(msgKey.key)
else return getMsgHash(msgKey)
}
if (msgKey.startsWith('ppppp:message/v1/')) {
const parts = msgKey.split('/')
return parts[parts.length - 1]
} else {
return msgKey
}
}
/**
* @param {string} id
* @returns {string}
*/
function stripAuthor(id) {
if (id.startsWith('ppppp:feed/v1/') === false) return id
const withoutPrefix = id.replace('ppppp:feed/v1/', '')
return withoutPrefix.split('/')[0]
}
module.exports = {
stripMsgKey,
stripAuthor,
}


@@ -1,261 +0,0 @@
/**
* @typedef {import("./index").Msg} Msg
*/
function lipmaa(n) {
let m = 1
let po3 = 3
let u = n
// find k such that (3^k - 1)/2 >= n
while (m < n) {
po3 *= 3
m = (po3 - 1) / 2
}
// find longest possible backjump
po3 /= 3
if (m !== n) {
while (u !== 0) {
m = (po3 - 1) / 2
po3 /= 3
u %= m
}
if (m !== po3) {
po3 = m
}
}
return n - po3
}
/**
* @param {string} a
* @param {string} b
* @returns number
*/
function compareMsgHashes(a, b) {
return a.localeCompare(b)
}
class Tangle {
/**
* @type {string}
*/
#rootHash
/**
* @type {Msg}
*/
#rootMsg
/**
* @type {Set<string>}
*/
#tips = new Set()
/**
* @type {Map<string, Array<string>>}
*/
#prev = new Map()
/**
* @type {Map<string, number>}
*/
#depth = new Map()
/**
* @type {Map<number, Array<string>>}
*/
#perDepth = new Map()
/**
* @type {number}
*/
#maxDepth
/**
* @param {string} rootHash
* @param {Iterable<Msg>} msgsIter
*/
constructor(rootHash) {
this.#rootHash = rootHash
this.#maxDepth = 0
}
add(msgHash, msg) {
if (msgHash === this.#rootHash && !this.#rootMsg) {
this.#tips.add(msgHash)
this.#perDepth.set(0, [msgHash])
this.#depth.set(msgHash, 0)
this.#rootMsg = msg
return
}
const tangles = msg.metadata.tangles
if (msgHash !== this.#rootHash && tangles[this.#rootHash]) {
this.#tips.add(msgHash)
const prev = tangles[this.#rootHash].prev
for (const p of prev) {
this.#tips.delete(p)
}
this.#prev.set(msgHash, prev)
const depth = tangles[this.#rootHash].depth
if (depth > this.#maxDepth) this.#maxDepth = depth
this.#depth.set(msgHash, depth)
const atDepth = this.#perDepth.get(depth) ?? []
atDepth.push(msgHash)
atDepth.sort(compareMsgHashes)
this.#perDepth.set(depth, atDepth)
return
}
}
/**
* @param {number} depth
* @returns {Array<string>}
*/
#getAllAtDepth(depth) {
return this.#perDepth.get(depth) ?? []
}
/**
* @returns {Array<string>}
*/
topoSort() {
if (!this.#rootMsg) {
console.warn('Tangle is missing root message')
return []
}
const sorted = []
const max = this.#maxDepth
for (let i = 0; i <= max; i++) {
const atDepth = this.#getAllAtDepth(i)
for (const msgHash of atDepth) {
sorted.push(msgHash)
}
}
return sorted
}
/**
* @returns {Set<string>}
*/
getTips() {
if (!this.#rootMsg) {
console.warn('Tangle is missing root message')
return new Set()
}
return this.#tips
}
/**
* @param {number} depth
* @returns {Set<string>}
*/
getLipmaaSet(depth) {
if (!this.#rootMsg) {
console.warn('Tangle is missing root message')
return new Set()
}
const lipmaaDepth = lipmaa(depth + 1) - 1
return new Set(this.#getAllAtDepth(lipmaaDepth))
}
/**
* @param {string} msgHash
* @returns {boolean}
*/
has(msgHash) {
return this.#depth.has(msgHash)
}
/**
* @param {string} msgHash
* @returns {number}
*/
getDepth(msgHash) {
return this.#depth.get(msgHash) ?? -1
}
isFeed() {
if (!this.#rootMsg) {
console.warn('Tangle is missing root message')
return false
}
if (this.#rootMsg.content) return false
const metadata = this.#rootMsg.metadata
return metadata.size === 0 && metadata.hash === null
}
getFeed() {
if (!this.isFeed()) return null
const { type, who } = this.#rootMsg.metadata
return { type, who }
}
shortestPathToRoot(msgHash) {
if (!this.#rootMsg) {
console.warn('Tangle is missing root message')
return []
}
const path = []
let current = msgHash
while (true) {
const prev = this.#prev.get(current)
if (!prev) break
let minDepth = this.#depth.get(current)
let min = current
for (const p of prev) {
const d = this.#depth.get(p)
if (d < minDepth) {
minDepth = d
min = p
} else if (d === minDepth && compareMsgHashes(p, min) < 0) {
min = p
}
}
path.push(min)
current = min
}
return path
}
precedes(a, b) {
if (!this.#rootMsg) {
console.warn('Tangle is missing root message')
return false
}
if (a === b) return false
if (b === this.#rootHash) return false
let toCheck = [b]
while (toCheck.length > 0) {
const prev = this.#prev.get(toCheck.shift())
if (!prev) continue
if (prev.includes(a)) return true
toCheck.push(...prev)
}
return false
}
size() {
return this.#depth.size
}
getMaxDepth() {
return this.#maxDepth
}
debug() {
let str = ''
const max = this.#maxDepth
for (let i = 0; i <= max; i++) {
const atDepth = this.#getAllAtDepth(i)
str += `Depth ${i}: ${atDepth.join(', ')}\n`
}
return str
}
}
module.exports = Tangle


@@ -1,249 +0,0 @@
const base58 = require('bs58')
const ed25519 = require('ssb-keys/sodium')
const stringify = require('json-canon')
const Tangle = require('./tangle')
const representContent = require('./represent-content')
function validateShape(msg) {
if (!msg || typeof msg !== 'object') {
return new Error('invalid message: not an object')
}
if (!msg.metadata || typeof msg.metadata !== 'object') {
return new Error('invalid message: must have metadata')
}
if (typeof msg.metadata.who === 'undefined') {
return new Error('invalid message: must have metadata.who')
}
if (msg.metadata.v !== 1) {
return new Error('invalid message: must have metadata.v 1')
}
if (typeof msg.metadata.tangles !== 'object') {
return new Error('invalid message: must have metadata.tangles')
}
if (typeof msg.metadata.hash === 'undefined') {
return new Error('invalid message: must have metadata.hash')
}
if (typeof msg.metadata.size === 'undefined') {
return new Error('invalid message: must have metadata.size')
}
if (typeof msg.content === 'undefined') {
return new Error('invalid message: must have content')
}
if (typeof msg.sig === 'undefined') {
return new Error('invalid message: must have sig')
}
}
function validateWho(msg) {
try {
const whoBuf = base58.decode(msg.metadata.who)
if (whoBuf.length !== 32) {
return new Error(
`invalid message: decoded "who" should be 32 bytes but was ${whoBuf.length}`
)
}
} catch (err) {
return new Error('invalid message: must have "who" as base58 string')
}
}
function validateMsgHash(str) {
try {
const hashBuf = Buffer.from(base58.decode(str))
if (hashBuf.length !== 16) {
return new Error(
`invalid message: decoded hash should be 16 bytes but was ${hashBuf.length}`
)
}
} catch (err) {
return new Error(
`invalid message: msgHash ${str} should have been a base58 string`
)
}
}
function validateSize(msg) {
const {
metadata: { size },
} = msg
if (!Number.isSafeInteger(size) || size < 0) {
return new Error(`invalid message: "size" should be an unsigned integer`)
}
}
function validateSignature(msg) {
const { sig } = msg
if (typeof sig !== 'string') {
return new Error('invalid message: must have sig as a string')
}
let sigBuf
try {
sigBuf = Buffer.from(base58.decode(sig))
if (sigBuf.length !== 64) {
// prettier-ignore
return new Error('invalid message: sig should be 64 bytes but was ' + sigBuf.length + ', on feed: ' + msg.metadata.who);
}
} catch (err) {
return new Error('invalid message: sig must be a base58 string')
}
const publicKeyBuf = Buffer.from(base58.decode(msg.metadata.who))
const signableBuf = Buffer.from(stringify(msg.metadata), 'utf8')
const verified = ed25519.verify(publicKeyBuf, sigBuf, signableBuf)
if (!verified) {
// prettier-ignore
return new Error('invalid message: sig does not match, on feed: ' + msg.metadata.who);
}
}
/**
*
* @param {any} msg
* @param {Tangle} tangle
* @param {*} tangleId
* @returns
*/
function validateTangle(msg, tangle, tangleId) {
if (!msg.metadata.tangles[tangleId]) {
return new Error('invalid message: must have metadata.tangles.' + tangleId)
}
const { depth, prev } = msg.metadata.tangles[tangleId]
if (!prev || !Array.isArray(prev)) {
// prettier-ignore
return new Error('invalid message: prev must be an array, on feed: ' + msg.metadata.who);
}
if (!Number.isSafeInteger(depth) || depth <= 0) {
// prettier-ignore
return new Error('invalid message: depth must be a positive integer, on feed: ' + msg.metadata.who);
}
if (tangle.isFeed()) {
const { type, who } = tangle.getFeed()
if (type !== msg.metadata.type) {
// prettier-ignore
return new Error(`invalid message: type "${msg.metadata.type}" does not match feed type "${type}"`)
}
if (who !== msg.metadata.who) {
// prettier-ignore
return new Error(`invalid message: who "${msg.metadata.who}" does not match feed who "${who}"`)
}
}
let lastPrev = null
let minDiff = Infinity
let countPrevUnknown = 0
for (const p of prev) {
if (typeof p !== 'string') {
// prettier-ignore
return new Error('invalid message: prev must contain strings but found ' + p + ', on feed: ' + msg.metadata.who);
}
if (p.startsWith('ppppp:')) {
// prettier-ignore
return new Error('invalid message: prev must not contain URIs, on feed: ' + msg.metadata.who);
}
if (lastPrev !== null) {
if (p === lastPrev) {
return new Error(`invalid message: prev must be unique set, on feed ${msg.metadata.who}`)
}
if (p < lastPrev) {
return new Error(`invalid message: prev must be sorted in alphabetical order, on feed ${msg.metadata.who}`)
}
}
lastPrev = p
if (!tangle.has(p)) {
countPrevUnknown += 1
continue
}
const prevDepth = tangle.getDepth(p)
const diff = depth - prevDepth
if (diff <= 0) {
// prettier-ignore
return new Error('invalid message: depth of prev ' + p + ' is not lower, on feed: ' + msg.metadata.who);
}
if (diff < minDiff) minDiff = diff
}
if (countPrevUnknown === prev.length) {
// prettier-ignore
return new Error('invalid message: all prev are locally unknown, on feed: ' + msg.metadata.who)
}
if (countPrevUnknown === 0 && minDiff !== 1) {
// prettier-ignore
return new Error('invalid message: depth must be the largest prev depth plus one');
}
}
function validateTangleRoot(msg, msgHash, tangleId) {
if (msgHash !== tangleId) {
// prettier-ignore
return new Error('invalid message: tangle root hash must match tangleId, on feed: ' + msg.metadata.who);
}
if (msg.metadata.tangles[tangleId]) {
// prettier-ignore
return new Error('invalid message: tangle root must not have self tangle data, on feed: ' + msg.metadata.who);
}
}
function validateType(type) {
if (!type || typeof type !== 'string') {
// prettier-ignore
return new Error('type is not a string');
}
if (type.length > 100) {
// prettier-ignore
return new Error('invalid type ' + type + ' is 100+ characters long');
}
if (type.length < 3) {
// prettier-ignore
return new Error('invalid type ' + type + ' is shorter than 3 characters');
}
if (/[^a-zA-Z0-9_]/.test(type)) {
// prettier-ignore
return new Error('invalid type ' + type + ' contains characters other than a-z, A-Z, 0-9, or _');
}
}
function validateContent(msg) {
const { content } = msg
if (content === null) {
return
}
if (Array.isArray(content)) {
return new Error('invalid message: content must not be an array')
}
if (typeof content !== 'object' && typeof content !== 'string') {
// prettier-ignore
return new Error('invalid message: content must be an object or string, on feed: ' + msg.metadata.who);
}
const [hash, size] = representContent(content)
if (hash !== msg.metadata.hash) {
// prettier-ignore
return new Error('invalid message: content hash does not match metadata.hash, on feed: ' + msg.metadata.who);
}
if (size !== msg.metadata.size) {
// prettier-ignore
return new Error('invalid message: content size does not match metadata.size, on feed: ' + msg.metadata.who);
}
}
function validate(msg, tangle, msgHash, rootHash) {
let err
if ((err = validateShape(msg))) return err
if ((err = validateWho(msg))) return err
if ((err = validateSize(msg))) return err
if (tangle.size() === 0) {
if ((err = validateTangleRoot(msg, msgHash, rootHash))) return err
} else {
if ((err = validateTangle(msg, tangle, rootHash))) return err
}
if ((err = validateContent(msg))) return err
if ((err = validateSignature(msg))) return err
}
module.exports = {
validateType,
validateContent,
validate,
validateMsgHash,
}

lib/ghosts.js Normal file (206 lines changed)

@@ -0,0 +1,206 @@
const FS = require('fs')
const Path = require('path')
const atomic = require('atomic-file-rw')
const multicb = require('multicb')
const mutexify = require('mutexify')
const Doneable = require('./utils/doneable')
// TODO: fs is only supported in node.js. We should support browser by replacing
// fs.readdir with a browser "file" that just lists all ghost files.
/**
* @typedef {import('./index').MsgID} MsgID
*/
/**
* @template T
* @typedef {import('mutexify').Mutexify<T>} Mutexify
*/
/**
* @template T
* @typedef {T extends void ?
* (...args: [Error] | []) => void :
* (...args: [Error] | [null, T]) => void
* } CB
*/
class Ghosts {
/** @type {string} */
#basePath
/** @type {Doneable<void>} */
#loaded
/** @type {Map<MsgID, Map<string, number>>} */
#maps
/** @type {Mutexify<void>} */
#writeLock
static encodingOpts = { encoding: 'utf-8' }
/**
* @param {string} basePath
*/
constructor(basePath) {
this.#basePath = basePath
this.#maps = new Map()
this.#loaded = new Doneable()
this.#writeLock = mutexify()
// Load all ghosts files into Maps in memory
// TODO this is opening up ALL the files at once, perhaps we should allow a
// specific max concurrent number of reads? i.e. not fully sequential
// neither fully parallel
if (FS.existsSync(basePath)) {
const done = multicb({ pluck: 1 })
for (const tangleID of FS.readdirSync(basePath)) {
const cb = done()
this.#read(tangleID, (err, map) => {
// prettier-ignore
if (err) return cb(new Error('GhostDB failed to read ghost file', { cause: err }))
this.#maps.set(tangleID, map)
cb()
})
}
done((err, _) => {
// prettier-ignore
if (err) throw new Error('GhostDB failed to load', { cause: err })
this.#loaded.done()
})
} else {
this.#loaded.done()
}
}
/**
* @param {string} tangleID
*/
#path(tangleID) {
return Path.join(this.#basePath, tangleID)
}
/**
* @param {Map<string, number>} map
* @returns {string}
*/
#serialize(map) {
return JSON.stringify([...map])
}
/**
* @param {string} str
* @returns {Map<string, number>}
*/
#deserialize(str) {
return new Map(JSON.parse(str))
}
/**
* @param {string} tangleID
* @param {CB<Map<string, number>>} cb
*/
#read(tangleID, cb) {
atomic.readFile(this.#path(tangleID), Ghosts.encodingOpts, (err, str) => {
// Load Map
/** @type {Map<string, number>} */
let map
if (err && err.code === 'ENOENT') map = new Map()
// prettier-ignore
else if (err) return cb(new Error('GhostDB.read() failed to read ghost file', { cause: err }))
else map = this.#deserialize(str)
cb(null, map)
})
}
/**
* @param {() => void} cb
*/
onReady(cb) {
this.#loaded.onDone(cb)
}
/**
* @param {string} tangleID
* @param {string} msgID
* @param {number} depth
* @param {number} span
* @param {CB<void>} cb
*/
save(tangleID, msgID, depth, span, cb) {
this.#writeLock((unlock) => {
this.#loaded.onDone(() => {
if (!this.#maps.has(tangleID)) this.#maps.set(tangleID, new Map())
const map = this.#maps.get(tangleID)
const newMap = new Map(/** @type {Map<string, number>} */ (map))
newMap.set(msgID, depth)
// Garbage collect any ghost smaller than largestDepth - span
let largestDepth = -1
for (const depth of newMap.values()) {
if (depth > largestDepth) largestDepth = depth
}
for (const [x, depth] of newMap.entries()) {
if (depth <= largestDepth - span) newMap.delete(x)
}
atomic.writeFile(
this.#path(tangleID),
this.#serialize(newMap),
Ghosts.encodingOpts,
(err, _) => {
// prettier-ignore
if (err) return unlock(cb, new Error('GhostDB.save() failed to write ghost file', { cause: err }))
this.#maps.set(tangleID, newMap)
unlock(cb, null, void 0)
}
)
})
})
}
/**
* @param {string} tangleID
* @param {string} msgID
* @param {CB<void>} cb
*/
remove(tangleID, msgID, cb) {
this.#writeLock((unlock) => {
this.#loaded.onDone(() => {
if (!this.#maps.has(tangleID)) return unlock(cb, null, void 0)
const map = /** @type {Map<string, number>} */ (
this.#maps.get(tangleID)
)
if (!map.has(msgID)) return unlock(cb, null, void 0)
const newMap = new Map(map)
newMap.delete(msgID)
atomic.writeFile(
this.#path(tangleID),
this.#serialize(newMap),
Ghosts.encodingOpts,
(err, _) => {
// prettier-ignore
if (err) return unlock(cb,new Error('GhostDB.save() failed to write ghost file', { cause: err }))
this.#maps.set(tangleID, newMap)
unlock(cb, null, void 0)
}
)
})
})
}
/**
* @param {string} tangleID
* @returns {Map<string, number>}
*/
read(tangleID) {
if (!this.#loaded.isDone) {
throw new Error('GhostDB.read() called before loaded')
}
return this.#maps.get(tangleID) ?? new Map()
}
}
module.exports = Ghosts

File diff suppressed because it is too large

lib/log/errors.js Normal file (69 lines changed)

@@ -0,0 +1,69 @@
class ErrorWithCode extends Error {
/**
* @param {string} message
* @param {string} code
*/
constructor(message, code) {
super(message)
this.code = code
}
}
/**
* @param {number} offset
*/
function nanOffsetErr(offset) {
return new ErrorWithCode(`Offset ${offset} is not a number`, 'INVALID_OFFSET')
}
/**
* @param {number} offset
*/
function negativeOffsetErr(offset) {
return new ErrorWithCode(`Offset ${offset} is negative`, 'INVALID_OFFSET')
}
/**
* @param {number} offset
* @param {number} logSize
*/
function outOfBoundsOffsetErr(offset, logSize) {
return new ErrorWithCode(
`Offset ${offset} is beyond log size ${logSize}`,
'OFFSET_OUT_OF_BOUNDS'
)
}
function deletedRecordErr() {
return new ErrorWithCode('Record has been deleted', 'DELETED_RECORD')
}
function delDuringCompactErr() {
return new Error('Cannot delete while compaction is in progress')
}
function compactWithMaxLiveStreamErr() {
// prettier-ignore
return new Error('Compaction cannot run if there are live streams configured with opts.lt or opts.lte')
}
function overwriteLargerThanOld() {
// prettier-ignore
return new Error('Data to be overwritten should not be larger than existing data')
}
function appendLargerThanBlockErr() {
return new Error('Data to be appended is larger than block size')
}
module.exports = {
ErrorWithCode,
nanOffsetErr,
negativeOffsetErr,
outOfBoundsOffsetErr,
deletedRecordErr,
delDuringCompactErr,
compactWithMaxLiveStreamErr,
overwriteLargerThanOld,
appendLargerThanBlockErr,
}

lib/log/index.js Normal file (958 lines changed)

@@ -0,0 +1,958 @@
const fs = require('fs')
const b4a = require('b4a')
const p = require('promisify-tuple')
const AtomicFile = require('atomic-file-rw')
const mutexify = require('mutexify')
const Obz = require('obz') // @ts-ignore
const Cache = require('@alloc/quick-lru') // @ts-ignore
const RAF = require('polyraf') // @ts-ignore
const debounce = require('lodash.debounce') // @ts-ignore
const isBufferZero = require('is-buffer-zero') // @ts-ignore
const debug = require('debug')('pzp-db:log')
const {
deletedRecordErr,
nanOffsetErr,
negativeOffsetErr,
outOfBoundsOffsetErr,
appendLargerThanBlockErr,
overwriteLargerThanOld,
delDuringCompactErr,
} = require('./errors')
const Record = require('./record')
/**
* @typedef {Buffer | Uint8Array} B4A
* @typedef {number} BlockIndex
*/
/**
* @template T
* @typedef {import('mutexify').Mutexify<T>} Mutexify
*/
/**
* @template T
* @typedef {import('obz').Obz<T>} Obz
*/
/**
* @template T
* @typedef {{
* encode: (data: T) => B4A,
* decode: (data: B4A) => T
* }} Codec
*/
/**
* @template Type
* @typedef {Type extends Codec<infer X> ? X : never} extractCodecType
*/
/**
* @template T
* @typedef {{
* blockSize?: number,
* codec?: Codec<T>,
* writeTimeout?: number,
* validateRecord?: (data: B4A) => boolean
* }} Options
*/
/**
* @template T
* @typedef {T extends void ?
* (...args: [NodeJS.ErrnoException] | []) => void :
* (...args: [NodeJS.ErrnoException] | [null, T]) => void
* } CB
*/
/**
* @param {unknown} check
* @param {string} message
* @returns {asserts check}
*/
function assert(check, message) {
if (!check) throw new Error(message)
}
const DEFAULT_BLOCK_SIZE = 65536
const DEFAULT_WRITE_TIMEOUT = 250
const DEFAULT_VALIDATE = () => true
const COMPACTION_PROGRESS_START = { percent: 0, done: false }
const COMPACTION_PROGRESS_END_EMPTY = {
percent: 1,
done: true,
sizeDiff: 0,
holesFound: 0,
}
const COMPACTION_PROGRESS_EMIT_INTERVAL = 500
/**
* @template [T=B4A]
* @param {string} filename
* @param {Options<T>} opts
*/
function Log(filename, opts) {
const DEFAULT_CODEC = /** @type {Codec<T>} */ (
/** @type {any} */ ({
encode: (/** @type {any} */ x) => x,
decode: (/** @type {any} */ x) => x,
})
)
const cache = new Cache({ maxSize: 1024 }) // This is potentially 64 MiB!
let raf = RAF(filename)
const statsFilename = filename + 'stats.json'
const blockSize = opts?.blockSize ?? DEFAULT_BLOCK_SIZE
const codec = opts?.codec ?? DEFAULT_CODEC
const writeTimeout = opts?.writeTimeout ?? DEFAULT_WRITE_TIMEOUT
const validateRecord = opts?.validateRecord ?? DEFAULT_VALIDATE
/**
* @type {Array<CallableFunction>}
*/
const waitingLoad = []
/** @type {Map<BlockIndex, Array<CallableFunction>>} */
const waitingDrain = new Map() // blockIndex -> []
/** @type {Array<CB<any>>} */
const waitingFlushOverwrites = []
/** @type {Map<BlockIndex, {blockBuf: B4A; offset: number}>} */
const blocksToBeWritten = new Map() // blockIndex -> { blockBuf, offset }
/** @type {Map<BlockIndex, B4A>} */
const blocksWithOverwritables = new Map() // blockIndex -> blockBuf
let flushingOverwrites = false
let writingBlockIndex = -1
let latestBlockBuf = /** @type {B4A | null} */ (null)
let latestBlockIndex = /** @type {number | null} */ (null)
let nextOffsetInBlock = /** @type {number | null} */ (null)
let deletedBytes = 0
/** Offset of last written record @type {Obz<number>} */
const lastRecOffset = Obz()
let compacting = false
const compactionProgress = Obz()
compactionProgress.set(COMPACTION_PROGRESS_START)
/** @type {Array<CB<any>>} */
const waitingCompaction = []
AtomicFile.readFile(statsFilename, 'utf8', function onStatsLoaded(err, json) {
if (err) {
// prettier-ignore
if (err.code !== 'ENOENT') debug('Failed loading stats file: %s', err.message)
deletedBytes = 0
} else {
try {
const stats = JSON.parse(json)
deletedBytes = stats.deletedBytes
} catch (err) {
// prettier-ignore
debug('Failed parsing stats file: %s', /** @type {Error} */ (err).message)
deletedBytes = 0
}
}
raf.stat(
/** @type {CB<{size: number}>} */ function onRAFStatDone(err, stat) {
// prettier-ignore
if (err && err.code !== 'ENOENT') debug('Failed to read %s stats: %s', filename, err.message)
const fileSize = stat ? stat.size : -1
if (fileSize <= 0) {
debug('Opened log file, which is empty')
latestBlockBuf = b4a.alloc(blockSize)
latestBlockIndex = 0
nextOffsetInBlock = 0
cache.set(0, latestBlockBuf)
lastRecOffset.set(-1)
// @ts-ignore
while (waitingLoad.length) waitingLoad.shift()()
} else {
const blockStart = fileSize - blockSize
loadLatestBlock(blockStart, function onLoadedLatestBlock(err) {
if (err) throw err
// prettier-ignore
debug('Opened log file, last record is at log offset %d, block %d', lastRecOffset.value, latestBlockIndex)
// @ts-ignore
while (waitingLoad.length) waitingLoad.shift()()
})
}
}
)
})
/**
* @param {number} blockStart
* @param {CB<void>} cb
*/
function loadLatestBlock(blockStart, cb) {
raf.read(
blockStart,
blockSize,
/** @type {CB<B4A>} */
(
function onRAFReadLastDone(err, blockBuf) {
if (err) return cb(err)
getLastGoodRecord(
blockBuf,
blockStart,
function gotLastGoodRecord(err, offsetInBlock) {
if (err) return cb(err)
latestBlockBuf = blockBuf
latestBlockIndex = blockStart / blockSize
const recSize = Record.readSize(blockBuf, offsetInBlock)
nextOffsetInBlock = offsetInBlock + recSize
lastRecOffset.set(blockStart + offsetInBlock)
cb()
}
)
}
)
)
}
/**
* @param {number} offset
*/
function getOffsetInBlock(offset) {
return offset % blockSize
}
/**
* @param {number} offset
*/
function getBlockStart(offset) {
return offset - getOffsetInBlock(offset)
}
/**
* @param {number} offset
*/
function getNextBlockStart(offset) {
return getBlockStart(offset) + blockSize
}
/**
* @param {number} offset
*/
function getBlockIndex(offset) {
return getBlockStart(offset) / blockSize
}
/** @type {Mutexify<any>} */
const writeLock = mutexify()
/**
* @template T
* @param {number} blockStart
* @param {B4A | undefined} blockBuf
* @param {T} successValue
* @param {CB<T>} cb
*/
function writeWithFSync(blockStart, blockBuf, successValue, cb) {
writeLock(function onWriteLockReleased(unlock) {
raf.write(
blockStart,
blockBuf,
function onRAFWriteDone(/** @type {Error | null} */ err) {
if (err) return unlock(cb, err)
if (raf.fd) {
fs.fsync(raf.fd, function onFSyncDone(err) {
if (err) unlock(cb, err)
else unlock(cb, null, successValue)
})
} else unlock(cb, null, successValue)
}
)
})
}
/**
* @param {B4A} blockBuf
* @param {number} badOffsetInBlock
* @param {number} blockStart
* @param {number} successValue
* @param {CB<number>} cb
*/
function fixBlock(blockBuf, badOffsetInBlock, blockStart, successValue, cb) {
// prettier-ignore
debug('Fixing a block with an invalid record at block offset %d', badOffsetInBlock)
blockBuf.fill(0, badOffsetInBlock, blockSize)
writeWithFSync(blockStart, blockBuf, successValue, cb)
}
/**
* @param {B4A} blockBuf
* @param {number} blockStart
* @param {CB<number>} cb
*/
function getLastGoodRecord(blockBuf, blockStart, cb) {
let lastGoodOffset = 0
for (let offsetInRec = 0; offsetInRec < blockSize; ) {
if (Record.isEOB(blockBuf, offsetInRec)) break
const [dataBuf, recSize, dataLength] = Record.read(blockBuf, offsetInRec)
const isLengthCorrupt = offsetInRec + recSize > blockSize
const isDataCorrupt = dataLength > 0 && !validateRecord(dataBuf)
if (isLengthCorrupt || isDataCorrupt) {
fixBlock(blockBuf, offsetInRec, blockStart, lastGoodOffset, cb)
return
}
lastGoodOffset = offsetInRec
offsetInRec += recSize
}
cb(null, lastGoodOffset)
}
/**
* @param {number} offset
* @param {CB<B4A>} cb
*/
function getBlock(offset, cb) {
const blockIndex = getBlockIndex(offset)
if (cache.has(blockIndex)) {
debug('Reading block %d at log offset %d from cache', blockIndex, offset)
const cachedBlockBuf = cache.get(blockIndex)
cb(null, cachedBlockBuf)
} else {
debug('Reading block %d at log offset %d from disc', blockIndex, offset)
const blockStart = getBlockStart(offset)
raf.read(
blockStart,
blockSize,
/** @type {CB<B4A>} */
(
function onRAFReadDone(err, blockBuf) {
if (err) return cb(err)
cache.set(blockIndex, blockBuf)
cb(null, blockBuf)
}
)
)
}
}
/**
* @param {number} offset
* @param {CB<extractCodecType<typeof codec>>} cb
*/
function get(offset, cb) {
assert(typeof latestBlockIndex === 'number', 'latestBlockIndex not set')
assert(typeof nextOffsetInBlock === 'number', 'nextOffsetInBlock not set')
const logSize = latestBlockIndex * blockSize + nextOffsetInBlock
if (typeof offset !== 'number') return cb(nanOffsetErr(offset))
if (isNaN(offset)) return cb(nanOffsetErr(offset))
if (offset < 0) return cb(negativeOffsetErr(offset))
if (offset >= logSize) return cb(outOfBoundsOffsetErr(offset, logSize))
getBlock(offset, function gotBlock(err, blockBuf) {
if (err) return cb(err)
const offsetInBlock = getOffsetInBlock(offset)
const [dataBuf, _recSize, dataLength, emptyLength] = Record.read(
blockBuf,
offsetInBlock
)
if (dataLength === 0 && emptyLength > 0) return cb(deletedRecordErr())
// @ts-ignore
cb(null, codec.decode(dataBuf))
})
}
/**
* Returns [nextOffset, decodedRecord, recordSize] where nextOffset can take 3
* forms:
* * `-1`: end of log
* * `0`: need a new block
* * `>0`: next record within block
* @param {Buffer} blockBuf
* @param {number} offset
* @param {boolean} asRaw
* @return {[number, extractCodecType<typeof codec> | B4A | null, number]}
*/
function getDataNextOffset(blockBuf, offset, asRaw = false) {
const offsetInBlock = getOffsetInBlock(offset)
const [dataBuf, recSize, dataLength, emptyLength] = Record.read(
blockBuf,
offsetInBlock
)
const nextOffsetInBlock = offsetInBlock + recSize
let nextOffset
if (Record.isEOB(blockBuf, nextOffsetInBlock)) {
if (getNextBlockStart(offset) > lastRecOffset.value) nextOffset = -1
else nextOffset = 0
} else {
nextOffset = offset + recSize
}
if (dataLength === 0 && emptyLength > 0) return [nextOffset, null, recSize]
else return [nextOffset, asRaw ? dataBuf : codec.decode(dataBuf), recSize]
}
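// For example (hypothetical layout): if the first block holds two records at
// block offsets 0 and 20, getDataNextOffset(blockBuf, 0) returns
// [20, <decoded record>, 20]. For the very last record in the log it returns
// nextOffset === -1, and at the end of a non-final block it returns
// nextOffset === 0 to signal that the next block must be loaded.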
/**
* @param {(offset: number, data: extractCodecType<typeof codec> | null, size: number) => Promise<void> | void} onNext
* @param {(error?: Error) => void} onDone
* @param {boolean} asRaw
*/
function scan(onNext, onDone, asRaw = false) {
let cursor = 0
const gotNextBlock =
/** @type {CB<B4A>} */
(
async (err, blockBuf) => {
if (err) return onDone(err)
if (isBufferZero(blockBuf)) return onDone()
while (true) {
const [offset, data, size] = getDataNextOffset(
blockBuf,
cursor,
asRaw
)
// @ts-ignore
const promise = onNext(cursor, data, size)
if (promise) await promise
if (offset === 0) {
cursor = getNextBlockStart(cursor)
getNextBlock()
return
} else if (offset === -1) {
onDone()
return
} else {
cursor = offset
}
}
}
)
function getNextBlock() {
setTimeout(getBlock, 0, cursor, gotNextBlock)
}
getNextBlock()
}
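// Usage sketch (hypothetical, assuming a Log instance named `log`): count
// deleted records, which scan() reports as data === null:
//
//   let holes = 0
//   log.scan(
//     (offset, data, size) => { if (data === null) holes += 1 },
//     (err) => { if (err) console.error(err); else console.log(holes, 'holes') }
//   )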
/**
* @param {number} offset
* @param {CB<void>} cb
*/
function del(offset, cb) {
if (compacting) {
cb(delDuringCompactErr())
return
}
const blockIndex = getBlockIndex(offset)
if (blocksToBeWritten.has(blockIndex)) {
onDrain(function delAfterDrained() {
del(offset, cb)
})
return
}
const gotBlockForDelete = /** @type {CB<B4A>} */ (
(err, blockBuf) => {
if (err) return cb(err)
assert(blockBuf, 'blockBuf should be defined in gotBlockForDelete')
const blockBufNow = blocksWithOverwritables.get(blockIndex) ?? blockBuf
const offsetInBlock = getOffsetInBlock(offset)
Record.overwriteAsEmpty(blockBufNow, offsetInBlock)
deletedBytes += Record.readSize(blockBufNow, offsetInBlock)
blocksWithOverwritables.set(blockIndex, blockBufNow)
scheduleFlushOverwrites()
// prettier-ignore
debug('Deleted record at log offset %d, block %d, block offset %d', offset, blockIndex, offsetInBlock)
cb()
}
)
if (blocksWithOverwritables.has(blockIndex)) {
const blockBuf = /** @type {any} */ (
blocksWithOverwritables.get(blockIndex)
)
gotBlockForDelete(null, blockBuf)
} else {
getBlock(offset, gotBlockForDelete)
}
}
/**
* @param {Uint8Array} dataBuf
* @param {number} offsetInBlock
*/
function hasNoSpaceFor(dataBuf, offsetInBlock) {
return offsetInBlock + Record.size(dataBuf) + Record.EOB_SIZE > blockSize
}
const scheduleFlushOverwrites = debounce(flushOverwrites, writeTimeout)
function flushOverwrites() {
if (blocksWithOverwritables.size === 0) {
for (const cb of waitingFlushOverwrites) cb()
waitingFlushOverwrites.length = 0
return
}
const blockIndex = blocksWithOverwritables.keys().next().value
const blockStart = blockIndex * blockSize
const blockBuf = blocksWithOverwritables.get(blockIndex)
blocksWithOverwritables.delete(blockIndex)
flushingOverwrites = true
writeWithFSync(
blockStart,
blockBuf,
null,
function flushedOverwrites(err, _) {
if (err) debug('Failed to flush overwrites with fsync: %s', err.message)
saveStats(function onSavedStats(err, _) {
// prettier-ignore
if (err) debug('Failed to save stats file after flushing overwrites: %s', err.message)
flushingOverwrites = false
if (err) {
for (const cb of waitingFlushOverwrites) cb(err)
waitingFlushOverwrites.length = 0
return
}
flushOverwrites() // next
})
}
)
}
/**
* @param {CB<void>} cb
*/
function onOverwritesFlushed(cb) {
if (flushingOverwrites || blocksWithOverwritables.size > 0) {
waitingFlushOverwrites.push(cb)
} else cb()
}
/**
* @param {extractCodecType<typeof codec>} data
* @returns {number}
*/
function appendSingle(data) {
let encodedData = codec.encode(data)
if (typeof encodedData === 'string') encodedData = b4a.from(encodedData)
if (Record.size(encodedData) + Record.EOB_SIZE > blockSize) {
throw appendLargerThanBlockErr()
}
assert(typeof latestBlockIndex === 'number', 'latestBlockIndex not set')
assert(typeof nextOffsetInBlock === 'number', 'nextOffsetInBlock not set')
if (hasNoSpaceFor(encodedData, nextOffsetInBlock)) {
const nextBlockBuf = b4a.alloc(blockSize)
latestBlockBuf = nextBlockBuf
latestBlockIndex += 1
nextOffsetInBlock = 0
// prettier-ignore
debug('Block %d created at log offset %d to fit new record', latestBlockIndex, latestBlockIndex * blockSize)
}
// prettier-ignore
debug('Appending record at log offset %d, blockIndex %d, block offset %d', latestBlockIndex * blockSize + nextOffsetInBlock, latestBlockIndex, nextOffsetInBlock)
assert(latestBlockBuf, 'latestBlockBuf not set')
Record.write(latestBlockBuf, nextOffsetInBlock, encodedData)
cache.set(latestBlockIndex, latestBlockBuf) // update cache
const offset = latestBlockIndex * blockSize + nextOffsetInBlock
blocksToBeWritten.set(latestBlockIndex, {
blockBuf: latestBlockBuf,
offset,
})
nextOffsetInBlock += Record.size(encodedData)
scheduleWrite()
return offset
}
/**
* @param {extractCodecType<typeof codec>} data
* @param {CB<number>} cb
*/
function append(data, cb) {
if (compacting) {
waitingCompaction.push(() => append(data, cb))
return
}
let offset
try {
offset = appendSingle(data)
} catch (err) {
return cb(/** @type {any} */ (err))
}
cb(null, offset)
}
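// Usage sketch (hypothetical): append() reports the offset right away, but the
// actual disk write is debounced, so pair it with onDrain() when the record
// must be persisted:
//
//   log.append(b4a.from('hello'), (err, offset) => {
//     if (err) throw err
//     log.onDrain(() => console.log('persisted at log offset', offset))
//   })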
const scheduleWrite = debounce(write, writeTimeout)
function write() {
if (blocksToBeWritten.size === 0) return
const blockIndex = blocksToBeWritten.keys().next().value
const blockStart = blockIndex * blockSize
const { blockBuf, offset } =
/** @type {{ blockBuf: B4A, offset: number }} */ (
blocksToBeWritten.get(blockIndex)
)
blocksToBeWritten.delete(blockIndex)
// prettier-ignore
debug('Writing block %d of size %d at log offset %d', blockIndex, blockBuf.length, blockStart)
writingBlockIndex = blockIndex
writeWithFSync(blockStart, blockBuf, null, function onBlockWritten(err, _) {
const drainsBefore = (waitingDrain.get(blockIndex) || []).slice(0)
writingBlockIndex = -1
if (err) {
// prettier-ignore
debug('Failed to write block %d at log offset %d', blockIndex, blockStart)
throw err
} else {
lastRecOffset.set(offset)
// prettier-ignore
if (drainsBefore.length > 0) debug('Draining the waiting queue (%d functions) for block %d at log offset %d', drainsBefore.length, blockIndex, blockStart)
for (let i = 0; i < drainsBefore.length; ++i) drainsBefore[i]()
// the resumed streams might have added more to waiting
let drainsAfter = waitingDrain.get(blockIndex) || []
if (drainsBefore.length === drainsAfter.length) {
waitingDrain.delete(blockIndex)
} else if (drainsAfter.length === 0) {
waitingDrain.delete(blockIndex)
} else {
waitingDrain.set(
blockIndex,
// @ts-ignore
waitingDrain.get(blockIndex).slice(drainsBefore.length)
)
}
write() // next!
}
})
}
/**
* @param {number} offset
* @param {extractCodecType<typeof codec>} data
* @param {CB<void>} cb
*/
function overwrite(offset, data, cb) {
if (compacting) {
waitingCompaction.push(() => overwrite(offset, data, cb))
return
}
let encodedData = codec.encode(data)
if (typeof encodedData === 'string') encodedData = b4a.from(encodedData)
assert(typeof latestBlockIndex === 'number', 'latestBlockIndex not set')
assert(typeof nextOffsetInBlock === 'number', 'nextOffsetInBlock not set')
const logSize = latestBlockIndex * blockSize + nextOffsetInBlock
const blockIndex = getBlockIndex(offset)
if (typeof offset !== 'number') return cb(nanOffsetErr(offset))
if (isNaN(offset)) return cb(nanOffsetErr(offset))
if (offset < 0) return cb(negativeOffsetErr(offset))
if (offset >= logSize) return cb(outOfBoundsOffsetErr(offset, logSize))
// Get the existing record at offset
getBlock(offset, function gotBlock(err, blockBuf) {
if (err) return cb(err)
const blockBufNow = blocksWithOverwritables.get(blockIndex) ?? blockBuf
const offsetInBlock = getOffsetInBlock(offset)
const oldDataLength = Record.readDataLength(blockBufNow, offsetInBlock)
const oldEmptyLength = Record.readEmptyLength(blockBufNow, offsetInBlock)
// Make sure encodedData fits inside existing record
if (encodedData.length > oldDataLength + oldEmptyLength) {
return cb(overwriteLargerThanOld())
}
const newEmptyLength = oldDataLength - encodedData.length
deletedBytes += newEmptyLength
// write
Record.write(blockBufNow, offsetInBlock, encodedData, newEmptyLength)
blocksWithOverwritables.set(blockIndex, blockBufNow)
scheduleFlushOverwrites()
// prettier-ignore
debug('Overwrote record at log offset %d, block %d, block offset %d', offset, blockIndex, offsetInBlock)
cb()
})
}
function getTotalBytes() {
assert(typeof latestBlockIndex === 'number', 'latestBlockIndex not set')
assert(typeof nextOffsetInBlock === 'number', 'nextOffsetInBlock not set')
return latestBlockIndex * blockSize + nextOffsetInBlock
}
/**
* @param {CB<{ totalBytes: number; deletedBytes: number }>} cb
*/
function stats(cb) {
onLoad(() => {
cb(null, {
totalBytes: getTotalBytes(),
deletedBytes,
})
})()
}
/**
* @param {CB<void>} cb
*/
function saveStats(cb) {
const stats = JSON.stringify({ deletedBytes })
AtomicFile.writeFile(statsFilename, stats, 'utf8', (err, _) => {
if (err) return cb(new Error('Failed to save stats file', { cause: err }))
cb()
})
}
/** @type {CB<void>} */
function logError(err) {
if (err) console.error(err)
}
/**
* Compaction is the process of removing deleted records from the log by
* creating a new log with only the undeleted records, and then atomically
* swapping the new log for the old one.
* @param {CB<void>?} cb
*/
async function compact(cb) {
cb ??= logError
const debug2 = debug.extend('compact')
if (deletedBytes === 0) {
debug2('Skipping compaction since there are no deleted bytes')
compactionProgress.set(COMPACTION_PROGRESS_END_EMPTY)
return cb()
}
await p(onDrain)()
const [err1] = await p(onOverwritesFlushed)()
if (err1) {
// prettier-ignore
return cb(new Error('Compact failed to pre-flush overwrites', { cause: err1 }))
}
if (compacting) {
if (cb) waitingCompaction.push(cb)
return
}
compacting = true
const startCompactTimestamp = Date.now()
if (compactionProgress.value.done) {
compactionProgress.set(COMPACTION_PROGRESS_START)
}
const filenameNew = filename + '.compacting'
const [err2] = await p(fs.unlink.bind(fs))(filenameNew)
if (err2 && err2.code !== 'ENOENT') {
compacting = false
// prettier-ignore
return cb(new Error('Compact failed to get rid of previous compacting log', { cause: err2 }))
}
const rafNew = RAF(filenameNew)
/**
* @param {number} blockIndex
* @param {B4A} blockBuf
* @returns {Promise<void>}
*/
function writeBlock(blockIndex, blockBuf) {
const blockStart = blockIndex * blockSize
// prettier-ignore
debug2('Writing block %d of size %d at log offset %d', blockIndex, blockBuf.length, blockStart)
return new Promise((resolve, reject) => {
rafNew.write(
blockStart,
blockBuf,
/** @type {CB<void>} */
function onCompactRAFWriteDone(err) {
if (err) return reject(err)
if (rafNew.fd) {
fs.fsync(rafNew.fd, function onCompactFSyncDone(err) {
if (err) reject(err)
else resolve()
})
} else resolve()
}
)
})
}
// Scan the old log and write blocks on the new log
const oldTotalBytes = getTotalBytes()
const oldLastRecOffset = lastRecOffset.value
let latestBlockBufNew = b4a.alloc(blockSize)
let latestBlockIndexNew = 0
let nextOffsetInBlockNew = 0
let holesFound = 0
let timestampLastEmit = Date.now()
const err3 = await new Promise((done) => {
scan(
function compactScanningRecord(oldRecOffset, data, size) {
const now = Date.now()
if (now - timestampLastEmit > COMPACTION_PROGRESS_EMIT_INTERVAL) {
timestampLastEmit = now
const percent = oldRecOffset / oldLastRecOffset
compactionProgress.set({ percent, done: false })
}
if (!data) {
holesFound += 1
return
}
const dataBuf = /** @type {B4A} */ (/** @type {any} */ (data))
/** @type {Promise<void> | undefined} */
let promiseWriteBlock = void 0
if (hasNoSpaceFor(dataBuf, nextOffsetInBlockNew)) {
promiseWriteBlock = writeBlock(
latestBlockIndexNew,
latestBlockBufNew
)
latestBlockBufNew = b4a.alloc(blockSize)
latestBlockIndexNew += 1
nextOffsetInBlockNew = 0
// prettier-ignore
debug2('Block %d created for log offset %d to fit new record', latestBlockIndexNew, latestBlockIndexNew * blockSize)
}
Record.write(latestBlockBufNew, nextOffsetInBlockNew, dataBuf)
// prettier-ignore
debug2('Record copied into log offset %d, block %d, block offset %d', latestBlockIndexNew * blockSize + nextOffsetInBlockNew, latestBlockIndexNew, nextOffsetInBlockNew)
nextOffsetInBlockNew += Record.size(dataBuf)
return promiseWriteBlock
},
done,
true
)
})
if (err3) {
await p(rafNew.close.bind(rafNew))()
compacting = false
// prettier-ignore
return cb(new Error('Compact failed while scanning-sifting the old log', { cause: err3 }))
}
await writeBlock(latestBlockIndexNew, latestBlockBufNew)
// Swap the new log for the old one
const [[err4], [err5]] = await Promise.all([
p(raf.close.bind(raf))(),
p(rafNew.close.bind(rafNew))(),
])
if (err4 ?? err5) {
compacting = false
// prettier-ignore
return cb(new Error('Compact failed to close log files', { cause: err4 ?? err5 }))
}
const [err6] = await p(fs.rename.bind(fs))(filenameNew, filename)
if (err6) {
compacting = false
// prettier-ignore
return cb(new Error('Compact failed to replace old log with new', { cause: err6 }))
}
raf = RAF(filename)
latestBlockBuf = latestBlockBufNew
latestBlockIndex = latestBlockIndexNew
nextOffsetInBlock = nextOffsetInBlockNew
cache.clear()
const nextSince = latestBlockIndex * blockSize + nextOffsetInBlock
const sizeDiff = oldTotalBytes - getTotalBytes()
lastRecOffset.set(nextSince)
const duration = Date.now() - startCompactTimestamp
debug2('Completed in %d ms', duration)
deletedBytes = 0
const [err7] = await p(saveStats)()
if (err7) {
compacting = false
return cb(new Error('Compact failed to save stats file', { cause: err7 }))
}
compactionProgress.set({ percent: 1, done: true, sizeDiff, holesFound })
compacting = false
for (const callback of waitingCompaction) callback()
waitingCompaction.length = 0
cb()
}
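// Usage sketch (hypothetical): compactionProgress is an Obz observable, so
// callers can subscribe to it while compact() runs:
//
//   log.compactionProgress((progress) => {
//     if (progress.done) console.log('compaction freed', progress.sizeDiff, 'bytes')
//   })
//   log.compact((err) => {
//     if (err) console.error('compaction failed', err)
//   })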
/**
* @param {CB<unknown>} cb
*/
function close(cb) {
onDrain(function closeAfterHavingDrained() {
onOverwritesFlushed(function closeAfterOverwritesFlushed() {
raf.close(cb)
})
})
}
/**
* @template T
* @param {T} fn
* @returns {T}
*/
function onLoad(fn) {
const fun = /** @type {(this: null | void, ...args: Array<any> )=>void} */ (
fn
)
return /** @type {any} */ (
function waitForLogLoaded(/** @type {any[]} */ ...args) {
if (latestBlockBuf === null) waitingLoad.push(fun.bind(null, ...args))
else fun(...args)
}
)
}
/**
* @param {() => void} fn
*/
function onDrain(fn) {
if (compacting) {
waitingCompaction.push(fn)
return
}
if (blocksToBeWritten.size === 0 && writingBlockIndex === -1) fn()
else {
const latestBlockIndex = /** @type {number} */ (
blocksToBeWritten.size > 0
? last(blocksToBeWritten.keys())
: writingBlockIndex
)
const drains = waitingDrain.get(latestBlockIndex) || []
drains.push(fn)
waitingDrain.set(latestBlockIndex, drains)
}
}
/**
* @param {IterableIterator<number>} iterable
*/
function last(iterable) {
let res = null
for (let x of iterable) res = x
return res
}
return {
// Public API:
scan: onLoad(scan),
del: onLoad(del),
append: onLoad(append),
overwrite: onLoad(overwrite),
close: onLoad(close),
onDrain: onLoad(onDrain),
onOverwritesFlushed: onLoad(onOverwritesFlushed),
compact: onLoad(compact),
compactionProgress,
lastRecOffset,
stats,
// Useful for tests
_get: onLoad(get),
}
}
module.exports = Log

164
lib/log/record.js Normal file

@ -0,0 +1,164 @@
const b4a = require('b4a')
/**
* @typedef {Buffer | Uint8Array} B4A
*/
/*
Binary format for a Record:
<record>
<dataLength: UInt16LE><emptyLength: UInt16LE>
<dataBuf: Arbitrary Bytes or empty Bytes>
</record>
The "Header" is the first two bytes for the dataLength.
*/
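// Worked example (hypothetical): a record holding the 3 data bytes "abc"
// occupies size("abc") = 2 + 2 + 3 = 7 bytes, laid out little-endian as:
//   <03 00> <00 00> <61 62 63>   (dataLength=3, emptyLength=0, data)
// After overwriteAsEmpty() the same 7 bytes become:
//   <00 00> <03 00> <00 00 00>   (dataLength=0, emptyLength=3, zeroed data)
// so readSize() still reports 7 and later records keep their offsets.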
const HEADER_D = 2 // uint16
const HEADER_E = 2 // uint16
const HEADER_SIZE = HEADER_D + HEADER_E // 4 bytes
/**
* @param {B4A} dataBuf
*/
function size(dataBuf) {
return HEADER_D + HEADER_E + dataBuf.length
}
/**
* @param {B4A} blockBuf
* @param {number} offsetInBlock
*/
function readDataLength(blockBuf, offsetInBlock) {
const view = new DataView(
blockBuf.buffer,
blockBuf.byteOffset,
blockBuf.byteLength
)
return view.getUint16(offsetInBlock, true)
}
/**
* @param {B4A} blockBuf
* @param {number} offsetInBlock
*/
function readEmptyLength(blockBuf, offsetInBlock) {
const view = new DataView(
blockBuf.buffer,
blockBuf.byteOffset,
blockBuf.byteLength
)
return view.getUint16(offsetInBlock + 2, true)
}
/**
* @param {B4A} blockBuf
* @param {number} offsetInBlock
*/
function isEmpty(blockBuf, offsetInBlock) {
return (
readDataLength(blockBuf, offsetInBlock) === 0 &&
readEmptyLength(blockBuf, offsetInBlock) > 0
)
}
/**
* The "End of Block" is a special field 4-bytes-long used to mark the end of a
* block, and in practice it's like a Record header "dataLength" and
* "emptyLength" fields both with the value 0.
*
* In most cases, the end region of a block will be much more than 4 bytes of
* zeros, but we want to guarantee there are at *least* 4 bytes at the end.
* @param {B4A} blockBuf
* @param {number} offsetInBlock
*/
function isEOB(blockBuf, offsetInBlock) {
return (
readDataLength(blockBuf, offsetInBlock) === 0 &&
readEmptyLength(blockBuf, offsetInBlock) === 0
)
}
/**
* @param {B4A} blockBuf
* @param {number} offsetInBlock
*/
function readSize(blockBuf, offsetInBlock) {
const dataLength = readDataLength(blockBuf, offsetInBlock)
const emptyLength = readEmptyLength(blockBuf, offsetInBlock)
return HEADER_D + HEADER_E + dataLength + emptyLength
}
/**
* @param {B4A} blockBuf
* @param {number} offsetInBlock
* @returns {[B4A, number, number, number]}
*/
function read(blockBuf, offsetInBlock) {
const dataLength = readDataLength(blockBuf, offsetInBlock)
const emptyLength = readEmptyLength(blockBuf, offsetInBlock)
const dataStart = offsetInBlock + HEADER_D + HEADER_E
const dataBuf = blockBuf.subarray(dataStart, dataStart + dataLength)
const size = HEADER_D + HEADER_E + dataLength + emptyLength
return [dataBuf, size, dataLength, emptyLength]
}
/**
* @param {B4A} blockBuf
* @param {number} offsetInBlock
* @param {B4A} dataBuf
* @param {number} emptySize
*/
function write(blockBuf, offsetInBlock, dataBuf, emptySize = 0) {
const dataSize = dataBuf.length
const dataHeaderPos = offsetInBlock
const emptyHeaderPos = dataHeaderPos + HEADER_D
const dataBodyPos = emptyHeaderPos + HEADER_E
const emptyBodyPos = dataBodyPos + dataSize
// write header
{
const view = new DataView(
blockBuf.buffer,
blockBuf.byteOffset,
blockBuf.byteLength
)
view.setUint16(dataHeaderPos, dataSize, true)
if (emptySize > 0) {
view.setUint16(emptyHeaderPos, emptySize, true)
}
}
// write body
{
if (dataSize > 0) {
b4a.copy(dataBuf, blockBuf, dataBodyPos)
}
if (emptySize > 0) {
b4a.fill(blockBuf, 0, emptyBodyPos, emptyBodyPos + emptySize)
}
}
}
/**
* @param {B4A} blockBuf
* @param {number} offsetInBlock
*/
function overwriteAsEmpty(blockBuf, offsetInBlock) {
const dataLength = readDataLength(blockBuf, offsetInBlock)
write(blockBuf, offsetInBlock, b4a.alloc(0), dataLength)
}
module.exports = {
EOB_SIZE: HEADER_D + HEADER_E,
size,
readDataLength,
readEmptyLength,
readSize,
read,
write,
overwriteAsEmpty,
isEmpty,
isEOB,
}

12
lib/msg-v4/constants.js Normal file

@ -0,0 +1,12 @@
module.exports = {
/** @type {'self'} */
ACCOUNT_SELF: 'self',
/** @type {'any'} */
ACCOUNT_ANY: 'any',
ACCOUNT_DOMAIN_PREFIX: 'account__',
SIGNATURE_TAG_MSG_V4: ':msg-v4:',
SIGNATURE_TAG_ACCOUNT_ADD: ':account-add:',
}

57
lib/msg-v4/get-msg-id.js Normal file

@ -0,0 +1,57 @@
const b4a = require('b4a')
const crypto = require('crypto')
const base58 = require('bs58')
// @ts-ignore
const stringify = require('json-canon')
/**
* @typedef {import('./index').Msg} Msg
* @typedef {Buffer | Uint8Array} B4A
*/
/**
* @param {Msg} msg
* @returns {B4A}
*/
function getMsgHashBuf(msg) {
const metadataBuf = b4a.from(stringify(msg.metadata), 'utf8')
const metadataHash = crypto.createHash('sha512').update(metadataBuf).digest()
return b4a.from(metadataHash.subarray(0, 32))
}
/**
* @param {Msg | string} x
* @returns {string}
*/
function getMsgID(x) {
if (typeof x === 'string') {
if (x.startsWith('pzp:message/v4/')) {
const msgUri = x
const parts = msgUri.split('/')
return parts[parts.length - 1]
} else {
const msgHash = x
return msgHash
}
} else {
const msg = x
const msgHashBuf = getMsgHashBuf(msg)
return base58.encode(msgHashBuf)
}
}
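// For example (hypothetical placeholders): both calls below return the same
// bare hash, whether given a full URI or the hash itself:
//   getMsgID('pzp:message/v4/<accountID>/<domain>/<msgHash>') // => '<msgHash>'
//   getMsgID('<msgHash>') // => '<msgHash>'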
/**
* @param {Msg} msg
* @returns {string}
*/
function getMsgURI(msg) {
const { account, domain } = msg.metadata
const msgHash = getMsgID(msg)
if (domain) {
return `pzp:message/v4/${account}/${domain}/${msgHash}`
} else {
return `pzp:message/v4/${account}/${msgHash}`
}
}
module.exports = { getMsgURI, getMsgID }

337
lib/msg-v4/index.js Normal file

@ -0,0 +1,337 @@
const crypto = require('crypto')
const base58 = require('bs58')
const b4a = require('b4a')
// @ts-ignore
const stringify = require('json-canon')
const Keypair = require('pzp-keypair')
// @ts-ignore
const union = require('set.prototype.union')
const { stripAccount } = require('./strip')
const isMoot = require('./is-moot')
const { getMsgID } = require('./get-msg-id')
const representData = require('./represent-data')
const {
validateDomain,
validateData,
validate,
validateShape,
validateMsgID,
} = require('./validation')
const Tangle = require('./tangle')
const {
ACCOUNT_SELF,
ACCOUNT_ANY,
SIGNATURE_TAG_MSG_V4,
} = require('./constants')
const { isEmptyObject } = require('./util')
/**
* @typedef {import('pzp-keypair').Keypair} Keypair
*/
/**
* @template [T=any]
* @typedef {{
* data: T;
* metadata: {
* dataHash: string | null;
* dataSize: number;
* account: string | (typeof ACCOUNT_SELF) | (typeof ACCOUNT_ANY);
* accountTips: Array<string> | null;
* tangles: {
* [tangleID in string]: TangleMetadata
* };
* domain: string;
* v: 4;
* };
* sigkey: string;
* sig: string;
* }} Msg
*/
/**
* @template [T=any]
* @typedef {{
* data: T;
* metadata: {
* dataHash: string;
* dataSize: number;
* account: string;
* accountTips: Array<string>;
* tangles: {
* [tangleID in string]: TangleMetadata
* };
* domain: string;
* v: 4;
* };
* sigkey: string;
* sig: string;
* }} FeedMsg
*/
/**
* @typedef {Iterator<Msg> & {values: () => Iterator<Msg>}} MsgIter
*
* @typedef {Buffer | Uint8Array} B4A
*
* @typedef {{
* depth: number;
* prev: Array<string>;
* }} TangleMetadata
*
* @typedef {AccountAdd | AccountDel} AccountData
*
* @typedef {'add' | 'del' | 'internal-encryption' | 'external-encryption'} AccountPower
*
* @typedef {{
* purpose: 'shs-and-sig';
* algorithm: 'ed25519';
* bytes: string;
* }} ShsAndSigKey
* @typedef {{
* purpose: 'sig';
* algorithm: 'ed25519';
* bytes: string;
* }} SigKey
* @typedef {{
* purpose: 'external-encryption';
* algorithm: 'x25519-xsalsa20-poly1305';
* bytes: string;
* }} ExternalEncryptionKey;
*
* @typedef {ShsAndSigKey | SigKey | ExternalEncryptionKey} AccountKey
*
* @typedef {{
* action: 'add',
* key: AccountKey;
* nonce?: string;
* consent?: string;
* powers?: Array<AccountPower>;
* }} AccountAdd
*
* @typedef {{
* action: 'del',
* key: AccountKey;
* }} AccountDel
*
* @typedef {{
* data: any;
* domain: string;
* keypair: Keypair;
* account: string | (typeof ACCOUNT_SELF) | (typeof ACCOUNT_ANY);
* accountTips: Array<string> | null;
* tangles: {
* [tangleID in string]: Tangle
* };
* }} CreateOpts
*/
/**
* @param {string} accountId
* @param {string} domain
* @returns {string}
*/
function getMootID(accountId, domain) {
/** @type {Msg} */
const msg = {
data: null,
metadata: {
dataHash: null,
dataSize: 0,
account: stripAccount(accountId),
accountTips: null,
tangles: {},
domain,
v: 4,
},
sigkey: '',
sig: '',
}
return getMsgID(msg)
}
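// Note: because the moot's metadata is fully determined by (account, domain),
// any peer can compute a feed's ID without ever seeing the feed. Sketch with a
// hypothetical account ID and the domain 'post':
//   const feedID = getMootID('<accountID>', 'post')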
/**
* @param {Pick<CreateOpts, 'data'>} opts
* @returns {B4A}
*/
function toPlaintextBuffer(opts) {
return b4a.from(stringify(opts.data), 'utf8')
}
/**
* @param {CreateOpts} opts
* @returns {Msg}
*/
function create(opts) {
let err
if ((err = validateDomain(opts.domain))) throw err
if (!opts.tangles) throw new Error('opts.tangles is required')
const [dataHash, dataSize] = representData(opts.data)
const account = opts.account
const accountTips = opts.accountTips ? opts.accountTips.sort() : null
const tangles = /** @type {Msg['metadata']['tangles']} */ ({})
for (const rootID in opts.tangles) {
if ((err = validateMsgID(rootID))) throw err
const tangle = opts.tangles[rootID]
const depth = tangle.maxDepth + 1
const lipmaaSet = tangle.getLipmaaSet(depth)
const prev = [...union(lipmaaSet, tangle.tips)].sort()
tangles[rootID] = { depth, prev }
}
/** @type {Msg} */
const msg = {
data: opts.data,
metadata: {
dataHash,
dataSize,
account,
accountTips,
tangles,
domain: opts.domain,
v: 4,
},
sigkey: opts.keypair.public,
sig: '',
}
if ((err = validateData(msg))) throw err
const signableBuf = b4a.from(
SIGNATURE_TAG_MSG_V4 + stringify(msg.metadata),
'utf8'
)
msg.sig = Keypair.sign(opts.keypair, signableBuf)
return msg
}
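// Usage sketch (hypothetical identifiers): publishing onto an existing feed
// means passing the feed's Tangle so create() can derive depth and prev:
//   const tangle = new Tangle(mootID)
//   tangle.add(mootID, moot)
//   const msg = create({
//     data: { text: 'hello' },
//     domain: 'post',
//     keypair,
//     account: accountID,
//     accountTips: [accountID],
//     tangles: { [mootID]: tangle },
//   })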
/**
* @param {string} id
* @param {string} domain
* @param {Keypair} keypair
* @returns {Msg}
*/
function createMoot(id, domain, keypair) {
let err
if ((err = validateDomain(domain))) throw err
/** @type {Msg} */
const msg = {
data: null,
metadata: {
dataHash: null,
dataSize: 0,
account: id,
accountTips: null,
tangles: {},
domain,
v: 4,
},
sigkey: keypair.public,
sig: '',
}
const signableBuf = b4a.from(
SIGNATURE_TAG_MSG_V4 + stringify(msg.metadata),
'utf8'
)
msg.sig = Keypair.sign(keypair, signableBuf)
return msg
}
function getRandomNonce() {
return base58.encode(crypto.randomBytes(32))
}
/**
* @param {Keypair} keypair
* @param {string} domain
* @param {string | (() => string)} nonce
* @returns {Msg}
*/
function createAccount(keypair, domain, nonce = getRandomNonce) {
/** @type {AccountData} */
const data = {
action: 'add',
key: {
purpose: 'shs-and-sig',
algorithm: 'ed25519',
bytes: keypair.public,
},
nonce: typeof nonce === 'function' ? nonce() : nonce,
powers: ['add', 'del', 'external-encryption', 'internal-encryption'],
}
return create({
data,
account: ACCOUNT_SELF,
accountTips: null,
keypair,
tangles: {},
domain,
})
}
/**
* @param {Msg} msg
* @returns {Msg}
*/
function erase(msg) {
return { ...msg, data: null }
}
/**
* @param {B4A} plaintextBuf
* @param {Msg} msg
* @returns {Msg}
*/
function fromPlaintextBuffer(plaintextBuf, msg) {
return { ...msg, data: JSON.parse(plaintextBuf.toString('utf-8')) }
}
/**
* @param {Msg} msg
*/
function isRoot(msg) {
return isEmptyObject(msg.metadata.tangles)
}
/**
* @template T
* @param {Msg<T>} msg
* @returns {msg is FeedMsg<T>}
*/
function isFeedMsg(msg) {
const { account, accountTips } = msg.metadata
return Array.isArray(accountTips) && account !== 'self' && account !== 'any'
}
/**
* @param {any} x
* @returns {x is Msg}
*/
function isMsg(x) {
return !validateShape(x)
}
module.exports = {
isMsg,
isMoot,
isRoot,
isFeedMsg,
getMsgID,
getMootID,
create,
createMoot,
createAccount,
erase,
stripAccount,
toPlaintextBuffer,
fromPlaintextBuffer,
Tangle,
validate,
}

27
lib/msg-v4/is-moot.js Normal file

@ -0,0 +1,27 @@
const { stripAccount } = require('./strip')
const { isEmptyObject } = require('./util')
/**
* @typedef {import('.').Msg} Msg
*/
/**
* @param {Msg} msg
* @param {string | 0} id
* @param {string | 0} findDomain
*/
function isMoot(msg, id = 0, findDomain = 0) {
const { dataHash, dataSize, account, accountTips, tangles, domain } =
msg.metadata
if (msg.data !== null) return false
if (dataHash !== null) return false
if (dataSize !== 0) return false
if (account === 'self') return false
if (id !== 0 && account !== stripAccount(id)) return false
if (accountTips !== null) return false
if (!isEmptyObject(tangles)) return false
if (findDomain !== 0 && domain !== findDomain) return false
return true
}
module.exports = isMoot

23
lib/msg-v4/represent-data.js Normal file

@ -0,0 +1,23 @@
const crypto = require('crypto')
const b4a = require('b4a')
const base58 = require('bs58')
// @ts-ignore
const stringify = require('json-canon')
/**
* @typedef {Buffer | Uint8Array} B4A
*/
/**
* @param {any} data
* @returns {[string, number]}
*/
function representData(data) {
const dataBuf = b4a.from(stringify(data), 'utf8')
const fullHash = crypto.createHash('sha512').update(dataBuf).digest()
const dataHash = base58.encode(fullHash.subarray(0, 32))
const dataSize = dataBuf.length
return [dataHash, dataSize]
}
module.exports = representData

17
lib/msg-v4/strip.js Normal file

@ -0,0 +1,17 @@
/**
* @typedef {import('.').Msg} Msg
*/
/**
* @param {string} accountId
* @returns {string}
*/
function stripAccount(accountId) {
if (accountId.startsWith('pzp:account/v4/') === false) return accountId
const withoutPrefix = accountId.replace('pzp:account/v4/', '')
return withoutPrefix.split('/')[0]
}
module.exports = {
stripAccount,
}

348
lib/msg-v4/tangle.js Normal file

@ -0,0 +1,348 @@
const isMoot = require('./is-moot')
/**
* @typedef {import("./index").Msg} Msg
*/
/**
* @param {number} n
*/
function lipmaa(n) {
let m = 1
let po3 = 3
let u = n
// find k such that (3^k - 1)/2 >= n
while (m < n) {
po3 *= 3
m = (po3 - 1) / 2
}
// find longest possible backjump
po3 /= 3
if (m !== n) {
while (u !== 0) {
m = (po3 - 1) / 2
po3 /= 3
u %= m
}
if (m !== po3) {
po3 = m
}
}
return n - po3
}
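// For reference, the first few backlink targets this computes (n -> lipmaa(n)):
// 1->0, 2->1, 3->2, 4->1, 5->4, 6->5, 7->6, 8->4, 13->4, 40->13; i.e. the
// lipmaa link sequence, which keeps certificate paths back to the root short.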
/**
* @param {string} a
* @param {string} b
* @returns {number}
*/
function compareMsgIDs(a, b) {
return a.localeCompare(b)
}
class Tangle {
/**
* @type {string}
*/
#rootID
/**
* @type {Msg | undefined}
*/
#rootMsg
/**
* @type {Set<string>}
*/
#tips = new Set()
/**
* @type {Map<string, Array<string>>}
*/
#prev = new Map()
/**
* @type {Map<string, number>}
*/
#depth = new Map()
/**
* @type {Map<number, Array<string>>}
*/
#perDepth = new Map()
/**
* @type {number}
*/
#maxDepth
/**
* @param {string} rootID
*/
constructor(rootID) {
this.#rootID = rootID
this.#maxDepth = 0
}
/**
* @param {string} msgID
* @param {Msg} msg
*/
add(msgID, msg) {
// Add the root msg
if (msgID === this.#rootID && !this.#rootMsg) {
this.#tips.add(msgID)
this.#perDepth.set(0, [msgID])
this.#depth.set(msgID, 0)
this.#rootMsg = msg
return
}
// Add affix msg
const tangles = msg.metadata.tangles
if (msgID !== this.#rootID && tangles[this.#rootID]) {
if (this.#depth.has(msgID)) return
let hasSuccessor = false
for (const prevs of this.#prev.values()) {
if (prevs.includes(msgID)) {
hasSuccessor = true
break
}
}
if (!hasSuccessor) {
this.#tips.add(msgID)
}
const prev = tangles[this.#rootID].prev
for (const p of prev) {
this.#tips.delete(p)
}
this.#prev.set(msgID, prev)
const depth = tangles[this.#rootID].depth
if (depth > this.#maxDepth) this.#maxDepth = depth
this.#depth.set(msgID, depth)
const atDepth = this.#perDepth.get(depth) ?? []
atDepth.push(msgID)
atDepth.sort(compareMsgIDs)
this.#perDepth.set(depth, atDepth)
return
}
}
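// Usage sketch (hypothetical IDs): msgs may be added in any order and the
// tips/prev/depth indexes stay consistent; e.g. with a root and one reply
// whose metadata.tangles[rootID].prev === [rootID]:
//   const tangle = new Tangle(rootID)
//   tangle.add(rootID, rootMsg)
//   tangle.add(replyID, replyMsg)
//   tangle.tips // => Set { replyID }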
/**
* @param {number} depth
* @returns {Array<string>}
*/
#getAllAtDepth(depth) {
return this.#perDepth.get(depth) ?? []
}
/**
* @returns {Array<string>}
*/
topoSort() {
if (!this.#rootMsg) {
console.trace(`Tangle "${this.#rootID}" is missing root message`)
return []
}
const sorted = []
const max = this.#maxDepth
for (let i = 0; i <= max; i++) {
const atDepth = this.#getAllAtDepth(i)
for (const msgID of atDepth) {
sorted.push(msgID)
}
}
return sorted
}
/**
* @returns {Set<string>}
*/
get tips() {
if (!this.#rootMsg) {
console.trace(`Tangle "${this.#rootID}" is missing root message`)
return new Set()
}
return this.#tips
}
/**
* @param {number} depth
* @returns {Set<string>}
*/
getLipmaaSet(depth) {
if (!this.#rootMsg) {
console.trace(`Tangle "${this.#rootID}" is missing root message`)
return new Set()
}
const lipmaaDepth = lipmaa(depth + 1) - 1
return new Set(this.#getAllAtDepth(lipmaaDepth))
}
/**
* @param {string} msgID
* @returns {boolean}
*/
has(msgID) {
return this.#depth.has(msgID)
}
/**
* @param {string} msgID
* @returns {number}
*/
getDepth(msgID) {
return this.#depth.get(msgID) ?? -1
}
#isFeed() {
if (!this.#rootMsg) {
console.trace(`Tangle "${this.#rootID}" is missing root message`)
return false
}
return isMoot(this.#rootMsg)
}
get id() {
return this.#rootID
}
get mootDetails() {
if (!this.#isFeed()) return null
if (!this.#rootMsg) {
console.trace(`Tangle "${this.#rootID}" is missing root message`)
return null
}
const { account, domain } = this.#rootMsg.metadata
return { account, domain, id: this.#rootID }
}
/**
* @returns {'feed' | 'account' | 'weave' | null}
*/
get type() {
if (!this.#rootMsg) {
console.trace(new Error(`Tangle "${this.#rootID}" is missing root message`))
return null
}
if (this.#isFeed()) return 'feed'
if (this.#rootMsg.metadata.account === 'self') return 'account'
return 'weave'
}
get root() {
if (!this.#rootMsg) {
console.trace(new Error(`Tangle "${this.#rootID}" is missing root message`))
return null
}
return this.#rootMsg
}
/**
* @param {string} msgID
*/
shortestPathToRoot(msgID) {
if (!this.#rootMsg) {
console.trace(`Tangle "${this.#rootID}" is missing root message`)
return []
}
const path = []
let current = msgID
let lastPrev = undefined
while (true) {
const prev = this.#prev.get(current)
if (!prev) break
if (prev === lastPrev) {
// prettier-ignore
console.trace(new Error(`Tangle "${this.#rootID}" has a cycle or lacks a trail to the root`))
return null
} else {
lastPrev = prev
}
let minDepth = /** @type {number} */ (this.#depth.get(current))
let min = current
for (const p of prev) {
const d = /** @type {number} */ (this.#depth.get(p))
if (typeof d === 'number' && d < minDepth) {
minDepth = d
min = p
} else if (d === minDepth && compareMsgIDs(p, min) < 0) {
min = p
}
}
path.push(min)
current = min
}
return path
}
/**
* Of the given msgs, filter out those that are preceded by others, returning
* an array that contains only the "preceded by no one else" msgs.
* @param {Array<string>} msgIDs
* @return {Array<string>}
*/
getMinimumAmong(msgIDs) {
const minimum = new Set(msgIDs)
for (const i of msgIDs) {
for (const j of msgIDs) {
if (this.precedes(i, j)) {
minimum.delete(j)
}
}
}
return [...minimum]
}
/**
* @param {string} msgAID
* @param {string} msgBID
*/
precedes(msgAID, msgBID) {
if (!this.#rootMsg) {
console.trace(`Tangle "${this.#rootID}" is missing root message`)
return false
}
if (msgAID === msgBID) return false
if (msgBID === this.#rootID) return false
let toCheck = [msgBID]
const checked = new Set()
while (toCheck.length > 0) {
const checking = /** @type {string} */ (toCheck.shift())
checked.add(checking)
const prev = this.#prev.get(checking)
if (!prev) continue
if (prev.includes(msgAID)) {
checked.clear()
return true
}
toCheck.push(...prev.filter((p) => !checked.has(p)))
}
checked.clear()
return false
}
get size() {
return this.#depth.size
}
get maxDepth() {
return this.#maxDepth
}
debug() {
let str = ''
const max = this.#maxDepth
for (let i = 0; i <= max; i++) {
const atDepth = this.#getAllAtDepth(i)
str += `Depth ${i}: ${atDepth.join(', ')}\n`
}
return str
}
}
module.exports = Tangle

13
lib/msg-v4/util.js Normal file

@ -0,0 +1,13 @@
/**
* @param {any} obj
*/
function isEmptyObject(obj) {
for (const _key in obj) {
return false
}
return true
}
module.exports = {
isEmptyObject,
}

370
lib/msg-v4/validation.js Normal file

@ -0,0 +1,370 @@
const b4a = require('b4a')
const base58 = require('bs58')
const Keypair = require('pzp-keypair')
// @ts-ignore
const stringify = require('json-canon')
const Tangle = require('./tangle')
const representData = require('./represent-data')
const isMoot = require('./is-moot')
const {
SIGNATURE_TAG_MSG_V4,
ACCOUNT_SELF,
ACCOUNT_ANY,
} = require('./constants')
/**
* @typedef {import('.').Msg} Msg
* @typedef {import('.').AccountData} AccountData
*/
/**
* @param {Msg} msg
* @returns {string | undefined}
*/
function validateShape(msg) {
if (!msg || typeof msg !== 'object') {
return 'invalid msg: not an object\n' + JSON.stringify(msg)
}
if (!('data' in msg)) {
return 'invalid msg: must have data\n' + JSON.stringify(msg)
}
if (!msg.metadata || typeof msg.metadata !== 'object') {
return 'invalid msg: must have metadata\n' + JSON.stringify(msg)
}
if (!('dataHash' in msg.metadata)) {
return 'invalid msg: must have metadata.dataHash\n' + JSON.stringify(msg)
}
if (!('dataSize' in msg.metadata)) {
return 'invalid msg: must have metadata.dataSize\n' + JSON.stringify(msg)
}
if (!('account' in msg.metadata)) {
return 'invalid msg: must have metadata.account\n' + JSON.stringify(msg)
}
if (!('accountTips' in msg.metadata)) {
return 'invalid msg: must have metadata.accountTips\n' + JSON.stringify(msg)
}
if (!('tangles' in msg.metadata)) {
return 'invalid msg: must have metadata.tangles\n' + JSON.stringify(msg)
}
if (!('domain' in msg.metadata)) {
return 'invalid msg: must have metadata.domain\n' + JSON.stringify(msg)
}
if (msg.metadata.v !== 4) {
return 'invalid msg: must have metadata.v=4\n' + JSON.stringify(msg)
}
if (typeof msg.sig !== 'string') {
return 'invalid msg: must have sig\n' + JSON.stringify(msg)
}
return undefined
}
/**
* @param {Msg} msg
* @returns {string | undefined}
*/
function validateSigkey(msg) {
const { sigkey } = msg
if (typeof sigkey !== 'string') {
// prettier-ignore
return `invalid msg: sigkey "${sigkey}" should have been a string\n` + JSON.stringify(msg)
}
try {
const sigkeyBuf = base58.decode(sigkey)
if (sigkeyBuf.length !== 32) {
// prettier-ignore
return `invalid msg: decoded "sigkey" should be 32 bytes but was ${sigkeyBuf.length}\n` + JSON.stringify(msg)
}
} catch (err) {
// prettier-ignore
return `invalid msg: sigkey "${sigkey}" should have been a base58 string\n` + JSON.stringify(msg)
}
return undefined
}
/**
*
* @param {Msg} msg
* @param {Tangle} tangle
* @param {Set<string>} sigkeys
* @returns {string | undefined}
*/
function validateSigkeyAndAccount(msg, tangle, sigkeys) {
if (tangle.type === 'feed' || tangle.type === 'weave') {
if (msg.metadata.account === ACCOUNT_SELF) {
// prettier-ignore
return `invalid msg: account "${msg.metadata.account}" cannot be "self" in a feed tangle\n` + JSON.stringify(msg)
}
if (msg.metadata.account !== ACCOUNT_ANY && !sigkeys.has(msg.sigkey)) {
// prettier-ignore
return `invalid msg: sigkey "${msg.sigkey}" should have been one of "${[...sigkeys]}" from the account "${msg.metadata.account}"\n` + JSON.stringify(msg)
}
} else if (tangle.type === 'account') {
if (msg.metadata.account !== ACCOUNT_SELF) {
// prettier-ignore
return `invalid msg: account "${msg.metadata.account}" should have been "self" in an account tangle\n` + JSON.stringify(msg)
}
if (msg.metadata.accountTips !== null) {
// prettier-ignore
return `invalid msg: accountTips "${msg.metadata.accountTips}" should have been null in an account tangle\n` + JSON.stringify(msg)
}
} else if (tangle.type === null) {
return "Cannot validate tangle of unknown type"
}
return undefined
}
/**
* @param {string} str
* @returns {string | undefined}
*/
function validateMsgID(str) {
try {
const hashBuf = b4a.from(base58.decode(str))
if (hashBuf.length !== 32) {
// prettier-ignore
return `invalid msgID "${str}": should have 32 bytes but has ${hashBuf.length}`
}
} catch (err) {
return `invalid msgID "${str}": should have been a base58 string`
}
return undefined
}
/**
* @param {Msg} msg
* @returns {string | undefined}
*/
function validateSignature(msg) {
const { sig } = msg
if (typeof sig !== 'string') {
// prettier-ignore
return `invalid msg: sig "${sig}" should have been a string\n` + JSON.stringify(msg)
}
let sigBuf
try {
sigBuf = b4a.from(base58.decode(sig))
if (sigBuf.length !== 64) {
// prettier-ignore
return `invalid msg: sig should be 64 bytes but was ${sigBuf.length}\n` + JSON.stringify(msg)
}
} catch (err) {
// prettier-ignore
return `invalid msg: sig "${sig}" should have been a base58 string\n` + JSON.stringify(msg)
}
const signableBuf = b4a.from(
SIGNATURE_TAG_MSG_V4 + stringify(msg.metadata),
'utf8'
)
const keypair = {
curve: /** @type {const} */ ('ed25519'),
public: msg.sigkey,
}
const verified = Keypair.verify(keypair, signableBuf, sig)
if (!verified) {
return 'invalid msg: sig is invalid\n' + JSON.stringify(msg)
}
return undefined
}
/**
* @typedef {NonNullable<Tangle['mootDetails']>} MootDetails
*/
/**
* @param {Msg} msg
* @param {Tangle} tangle
* @param {string} tangleID
* @returns
*/
function validateTangle(msg, tangle, tangleID) {
if (!msg.metadata.tangles[tangleID]) {
// prettier-ignore
return `invalid msg: must have metadata.tangles.${tangleID}\n` + JSON.stringify(msg)
}
const { depth, prev } = msg.metadata.tangles[tangleID]
if (!prev || !Array.isArray(prev)) {
// prettier-ignore
return `invalid msg: prev "${prev}" should have been an array\n` + JSON.stringify(msg)
}
if (!Number.isSafeInteger(depth) || depth <= 0) {
// prettier-ignore
return `invalid msg: depth "${depth}" should have been a positive integer\n` + JSON.stringify(msg)
}
if (tangle.type === 'feed') {
const { account, domain } = /** @type {MootDetails} */ (tangle.mootDetails)
if (domain !== msg.metadata.domain) {
// prettier-ignore
return `invalid msg: domain "${msg.metadata.domain}" should have been feed domain "${domain}"\n` + JSON.stringify(msg)
}
if (account !== msg.metadata.account) {
// prettier-ignore
return `invalid msg: account "${msg.metadata.account}" should have been feed account "${account}"\n` + JSON.stringify(msg)
}
} else if (tangle.type === null) {
return "Unknown tangle type"
}
let lastPrev = null
let minDiff = Infinity
let countPrevUnknown = 0
for (const p of prev) {
if (typeof p !== 'string') {
// prettier-ignore
return `invalid msg: prev item "${p}" should have been a string\n` + JSON.stringify(msg)
}
if (p.startsWith('pzp:')) {
// prettier-ignore
return `invalid msg: prev item "${p}" is a URI, but should have been a hash\n` + JSON.stringify(msg)
}
if (lastPrev !== null) {
if (p === lastPrev) {
// prettier-ignore
return `invalid msg: prev "${prev}" contains duplicates\n` + JSON.stringify(msg)
}
if (p < lastPrev) {
// prettier-ignore
return `invalid msg: prev "${prev}" should have been alphabetically sorted\n` + JSON.stringify(msg)
}
}
lastPrev = p
if (!tangle.has(p)) {
countPrevUnknown += 1
continue
}
const prevDepth = tangle.getDepth(p)
const diff = depth - prevDepth
if (diff <= 0) {
// prettier-ignore
return `invalid msg: depth of prev "${p}" should have been lower than this message's depth\n` + JSON.stringify(msg)
}
if (diff < minDiff) minDiff = diff
}
if (countPrevUnknown === prev.length) {
return 'invalid msg: all prev are locally unknown\n' + JSON.stringify(msg)
}
if (countPrevUnknown === 0 && minDiff !== 1) {
// prettier-ignore
return `invalid msg: depth must be the largest prev depth plus one\n` + JSON.stringify(msg)
}
return undefined
}
/**
* @param {Msg} msg
* @param {string} msgID
* @param {string} tangleID
*/
function validateTangleRoot(msg, msgID, tangleID) {
if (msgID !== tangleID) {
// prettier-ignore
return `invalid msg: tangle root "${msgID}" must match tangleID "${tangleID}"\n` + JSON.stringify(msg)
}
if (msg.metadata.tangles[tangleID]) {
// prettier-ignore
return `invalid msg: tangle root "${tangleID}" must not have self tangle data\n` + JSON.stringify(msg)
}
return undefined
}
/**
* @param {string} domain
*/
function validateDomain(domain) {
if (!domain || typeof domain !== 'string') {
// prettier-ignore
return `invalid domain: "${domain}" (${typeof domain}) should have been a string`
}
if (domain.length > 100) {
return `invalid domain: "${domain}" is 100+ characters long`
}
if (domain.length < 3) {
return `invalid domain: "${domain}" is shorter than 3 characters`
}
if (/[^a-zA-Z0-9_]/.test(domain)) {
// prettier-ignore
return `invalid domain: "${domain}" contains characters other than a-z, A-Z, 0-9, or _`
}
return undefined
}
/**
* @param {Msg} msg
*/
function validateData(msg) {
const { data } = msg
if (data === null) {
return
}
if (Array.isArray(data)) {
// prettier-ignore
return `invalid msg: data "${data}" must not be an array\n` + JSON.stringify(msg)
}
if (typeof data !== 'object' && typeof data !== 'string') {
// prettier-ignore
return `invalid msg: data "${data}" must be an object or a string\n` + JSON.stringify(msg)
}
return undefined
}
/**
* @param {Msg} msg
*/
function validateDataSizeHash(msg) {
const { dataHash: actualHash, dataSize: actualSize } = msg.metadata
if (!Number.isSafeInteger(actualSize) || actualSize < 0) {
// prettier-ignore
return `invalid msg: dataSize ${actualSize} should have been an unsigned integer\n` + JSON.stringify(msg)
}
if (msg.data === null) return
const [expectedHash, expectedSize] = representData(msg.data)
if (actualHash !== expectedHash) {
// prettier-ignore
return `invalid msg: metadata.dataHash "${actualHash}" should have been "${expectedHash}"\n` + JSON.stringify(msg)
}
if (expectedSize !== msg.metadata.dataSize) {
// prettier-ignore
return `invalid msg: metadata.dataSize ${actualSize} should have been ${expectedSize}\n` + JSON.stringify(msg)
}
return undefined
}
/**
* @param {Msg} msg
* @param {Tangle} tangle
* @param {Set<string>} sigkeys
* @param {string} msgID
* @param {string} rootID
*/
function validate(msg, tangle, sigkeys, msgID, rootID) {
let err
if ((err = validateShape(msg))) return err
if ((err = validateSigkey(msg))) return err
if ((err = validateData(msg))) return err
if (tangle.type === 'feed' && isMoot(msg)) return // nothing else to check
if (tangle.type === null) return "Missing tangle type when validating msg"
if ((err = validateDataSizeHash(msg))) return err
if ((err = validateDomain(msg.metadata.domain))) return err
if ((err = validateSigkeyAndAccount(msg, tangle, sigkeys))) return err
if (msgID === rootID) {
if ((err = validateTangleRoot(msg, msgID, rootID))) return err
} else {
if ((err = validateTangle(msg, tangle, rootID))) return err
}
if ((err = validateSignature(msg))) return err
return undefined
}
module.exports = {
validateDomain,
validateData,
validateShape,
validate,
validateMsgID,
}


@ -1,21 +0,0 @@
class ReadyGate {
#waiting
#ready
constructor() {
this.#waiting = new Set()
this.#ready = false
}
onReady(cb) {
if (this.#ready) cb()
else this.#waiting.add(cb)
}
setReady() {
this.#ready = true
for (const cb of this.#waiting) cb()
this.#waiting.clear()
}
}
module.exports = { ReadyGate }

49
lib/utils/doneable.js Normal file

@ -0,0 +1,49 @@
/**
* @template T
* @typedef {import('../index').CB<T>} CB
*/
/**
* @template T
* @typedef {[] | [Error] | [null, T]} Args
*/
/**
* @template T
*/
class Doneable {
#waiting
#done
/** @type {Args<T> | null} */
#args
constructor() {
this.#waiting = new Set()
this.#done = false
this.#args = null
}
/**
* @param {CB<T>} cb
*/
onDone(cb) {
// @ts-ignore
if (this.#done) cb(...this.#args)
else this.#waiting.add(cb)
}
/**
* @param {Args<T>=} args
*/
done(args) {
this.#done = true
this.#args = args ?? []
for (const cb of this.#waiting) cb(...this.#args)
this.#waiting.clear()
}
get isDone() {
return this.#done
}
}
module.exports = Doneable
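// Usage sketch (hypothetical): a Doneable memoizes a one-shot async result, so
// late subscribers get the same callback arguments immediately:
//   const loaded = new Doneable()
//   loaded.onDone((err, value) => console.log(value)) // fires once done() runs
//   loaded.done([null, 42]) // waiting and future callbacks get (null, 42)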

package.json

@ -1,12 +1,20 @@
{
- "name": "ppppp-db",
- "version": "0.0.1",
- "description": "Default ppppp database",
- "main": "index.js",
+ "name": "pzp-db",
+ "version": "1.0.4",
+ "description": "Default PZP database",
+ "homepage": "https://codeberg.org/pzp/pzp-db",
+ "repository": {
+ "type": "git",
+ "url": "git@codeberg.org:pzp/pzp-db.git"
+ },
+ "author": "Andre Staltz <contact@staltz.com>",
+ "license": "MIT",
+ "type": "commonjs",
+ "main": "lib/index.js",
"files": [
- "*.js",
- "lib/**/*.js"
+ "lib/**/*"
],
+ "types": "types/index.d.ts",
"engines": {
"node": ">=16"
},
@ -14,46 +22,55 @@
".": {
"require": "./lib/index.js"
},
- "./feed-v1": {
- "require": "./lib/feed-v1/index.js"
+ "./msg-v4": {
+ "require": "./lib/msg-v4/index.js"
+ },
+ "./db-tangle": {
+ "require": "./lib/db-tangle.js"
}
},
- "type": "commonjs",
- "author": "Andre Staltz <contact@staltz.com>",
- "license": "MIT",
- "homepage": "https://github.com/staltz/ppppp-db",
- "repository": {
- "type": "git",
- "url": "git@github.com:staltz/ppppp-db.git"
- },
"dependencies": {
- "async-append-only-log": "^4.3.10",
- "blake3": "^2.1.7",
- "bs58": "^5.0.0",
- "json-canon": "^1.0.0",
- "obz": "^1.1.0",
- "promisify-4loc": "^1.0.0",
- "push-stream": "^11.2.0",
- "set.prototype.union": "^1.0.2",
- "ssb-uri2": "^2.4.1"
+ "@alloc/quick-lru": "^5.2.0",
+ "atomic-file-rw": "~0.3.0",
+ "b4a": "~1.6.4",
+ "blake3": "~2.1.7",
+ "bs58": "~5.0.0",
+ "debug": "^4.3.0",
+ "is-buffer-zero": "^1.0.0",
+ "json-canon": "~1.0.0",
+ "lodash.debounce": "~4.0.8",
+ "multicb": "~1.2.2",
+ "mutexify": "~1.4.0",
+ "obz": "~1.1.0",
+ "polyraf": "^1.1.0",
+ "pzp-keypair": "^1.0.0",
+ "promisify-4loc": "~1.0.0",
+ "promisify-tuple": "~1.2.0",
+ "pull-stream": "^3.7.0",
+ "push-stream": "~11.2.0",
+ "set.prototype.union": "~1.0.2"
},
"devDependencies": {
+ "@types/b4a": "^1.6.0",
+ "@types/pull-stream": "^3.6.7",
"c8": "^7.11.0",
+ "flumecodec": "~0.0.1",
"husky": "^4.3.0",
+ "pzp-caps": "^1.0.0",
"prettier": "^2.6.2",
"pretty-quick": "^3.1.3",
"rimraf": "^4.4.0",
- "secret-stack": "^6.4.1",
- "ssb-bendy-butt": "^1.0.0",
+ "secret-handshake-ext": "0.0.10",
+ "secret-stack": "8.0.0",
"ssb-box": "^1.0.1",
- "ssb-caps": "^1.1.0",
- "ssb-classic": "^1.1.0",
- "ssb-keys": "^8.5.0",
- "tap-arc": "^0.3.5",
- "tape": "^5.6.3"
+ "typescript": "^5.1.3"
},
"scripts": {
- "test": "tape 'test/**/*.test.js' | tap-arc --bail",
+ "clean-check": "tsc --build --clean",
+ "prepublishOnly": "npm run clean-check && tsc --build",
+ "postpublish": "npm run clean-check",
+ "test": "npm run clean-check && node --test",
+ "test-verbose": "VERBOSE=1 npm run test",
"format-code": "prettier --write \"(lib|test)/**/*.js\"",
"format-code-staged": "pretty-quick --staged --pattern \"(lib|test)/**/*.js\"",
"coverage": "c8 --reporter=lcov npm run test"


@ -1,25 +1,493 @@
# Msg V4
Background: https://github.com/ssbc/ssb2-discussion-forum/issues/24
## Terminology
- **Hash** = base58 encoded string of the first 32 bytes of a sha512 hash
- **Msg** = `{data,metadata,sigkey,sig}` published by a peer
- **Msg ID** = `hash(msg.metadata)`
- **Tangle** = a single-root DAG of msgs that can be replicated by peers
- **Root** = the origin msg of a tangle
- **Tangle Tips** = tangle msgs that are not yet referenced by any other msg in the tangle
- **Tangle ID** = Msg ID of the tangle's root msg
- **Account** = tangle with msgs that add or remove cryptographic keys
- **Account ID** = tangle ID of the account tangle
- **Feed** = tangle with msgs authored by (any sigkey in) an account
- **Moot** = the root of a feed, a msg that is deterministically predictable and empty, so as to allow others to pre-know its msg ID, and thus the feed ID
- **Feed ID** = ID of the moot of a feed (Msg ID of the feed's root msg)
JSON
```typescript
interface Msg {
data: Record<string, any> | string | null // a plaintext object, or ciphertext string, or null
metadata: {
account: string | 'self' | 'any' // msg ID of account root, or the string 'self', or the string 'any'
accountTips: Array<string> | null // list (of unique items sorted lexicographically) of msg IDs of account tangle tips, or null
dataHash: string | null // hash of the `data` object serialized
dataSize: number // byte size (unsigned integer) of the `data` object serialized
domain: string // alphanumeric string, at least 3 chars, max 100 chars
tangles: {
// for each tangle this msg belongs to, identified by the tangle's ID
[tangleID: string]: {
depth: number // maximum distance (positive integer) from this msg to the root
prev: Array<MsgID> // list (of unique items sorted lexicographically) of msg IDs of existing msgs
}
}
v: 4 // hard-coded at 4, indicates the version of the feed format
}
sigkey: Sigkey // base58 encoded string for the author's public key
sig: Signature // Signs the `metadata` object
}
```
**Depth:** we NEED this field because it is the most reliable way of calculating lipmaa distances between msgs in the face of sliced replication. For example, since older msgs (except the certificate pool) may have been deleted, a "graph depth" recalculated from whatever happens to be stored locally could change over time; the explicit `depth` field keeps that calculation stable and deterministic.
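For illustration, the depth rule ("maximum distance to the root") can be written as a recursion over `prev`. This sketch assumes a hypothetical `msgs` Map from msg ID to msg, with every prev locally known:
```js
// Illustrative only: depth(root) = 0, depth(msg) = 1 + max depth of its prevs
function depthOf(msgID, msgs, tangleID) {
  if (msgID === tangleID) return 0 // the root has no tangles[tangleID] entry
  const { prev } = msgs.get(msgID).metadata.tangles[tangleID]
  return 1 + Math.max(...prev.map((p) => depthOf(p, msgs, tangleID)))
}
```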
## Account tangle msgs
Msgs in an account tangle are special because they have empty `account` and `accountTips` fields.
```typescript
interface Msg {
data: AccountData
metadata: {
account: 'self' // MUST be the string 'self'
accountTips: null // MUST be null
dataHash: string
dataSize: number
domain: string // alphanumeric string, must start with "account__"
tangles: {
[accountTangleID: string]: {
depth: number
prev: Array<MsgID>
}
}
v: 4
}
sigkey: Sigkey
sig: Signature
}
type AccountData = AccountAdd | AccountDel
// (if key is sig) "add" means this key can validly add more keys to the account
// (if key is sig) "del" means this key can validly revoke keys from the account
// (if key is shs) "internal-encryption" means this peer can get symmetric key
// (if key is shs) "external-encryption" means this peer can get asymmetric key
type AccountPower = 'add' | 'del' | 'internal-encryption' | 'external-encryption'
type AccountAdd = {
action: 'add'
key: Key
nonce?: string // nonce required only on the account tangle's root
consent?: string // base58 encoded signature of the string `:account-add:<ID>` where `<ID>` is the account's ID, required only on non-root msgs
accountPowers?: Array<AccountPower> // list of powers granted to this key, defaults to []
}
type AccountDel = {
action: 'del'
key: Key
}
type Key =
| {
purpose: 'shs-and-sig' // secret-handshake and digital signatures
algorithm: 'ed25519' // libsodium crypto_sign_detached
bytes: string // base58 encoded string for the public key
}
| {
purpose: 'external-encryption' // asymmetric encryption
algorithm: 'x25519-xsalsa20-poly1305' // libsodium crypto_box_easy
bytes: string // base58 encoded string of the public key
}
| {
purpose: 'sig' // digital signatures only
algorithm: 'ed25519' // libsodium crypto_sign_detached
bytes: string // base58 encoded string for the public key
}
```
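As a rough sketch of how `add` and `del` compose, the current key set of an account is a fold over the account tangle's msgs in topological order. Nonce, consent, and power validation are omitted here, so this is far more permissive than the real implementation:
```js
// `msgsInTopoOrder` is assumed to be the account tangle's msgs, root first
function accountKeys(msgsInTopoOrder) {
  const keys = new Map() // key.bytes -> Key
  for (const msg of msgsInTopoOrder) {
    const { action, key } = msg.data
    if (action === 'add') keys.set(key.bytes, key)
    else if (action === 'del') keys.delete(key.bytes)
  }
  return [...keys.values()]
}
```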
Examples of `AccountData`:
- Registering the first public key:
```json
{
"action": "add",
"key": {
"purpose": "shs-and-sig",
"algorithm": "ed25519",
"bytes": "3JrJiHEQzRFMzEqWawfBgq2DSZDyihP1NHXshqcL8pB9"
},
"nonce": "6GHR1ZFFSB3C5qAGwmSwVH8f7byNo8Cqwn5PcyG3qDvS"
}
```
- Revoking a public key:
```json
{
"action": "del",
"key": {
"purpose": "shs-and-sig",
"algorithm": "ed25519",
"bytes": "3JrJiHEQzRFMzEqWawfBgq2DSZDyihP1NHXshqcL8pB9"
}
}
```
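The `consent` field is an ordinary ed25519 detached signature over the string `:account-add:<ID>`. A sketch assuming Node's `crypto` module with a KeyObject-style private key (pzp-keypair has its own keypair shape, so this is illustrative):
```js
const crypto = require('node:crypto')
const base58 = require('bs58')

// `privateKey` is assumed to be an ed25519 KeyObject for the key being added
function makeConsent(privateKey, accountID) {
  const payload = Buffer.from(`:account-add:${accountID}`, 'utf8')
  const sig = crypto.sign(null, payload, privateKey) // ed25519, detached
  return base58.encode(sig)
}
```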
## Feed root
The root msg for a feed is special, its `metadata` is predictable and can be constructed by any peer. It is a data-less msg with the following shape:
```typescript
interface Msg {
data: null // MUST be null
metadata: {
dataHash: null // MUST be null
dataSize: 0 // MUST be 0
account: string // MUST be an ID
accountTips: null // MUST be null
tangles: {} // MUST be empty object
domain: string
v: 4
}
sigkey: Sigkey
sig: Signature
}
```
Thus, given an `account` and a `domain`, any peer can construct the `metadata` part of the feed root msg, and thus derive the "msg ID" for the root based on that `metadata`.
Given the root msg ID, any peer can thus refer to the feed tangle, because the root msg ID is the tangle ID for the feed tangle.
Note also that _any peer_ can construct the root msg and sign it, which renders the signatures for feed roots meaningless and ignorable. Thus the name "moot".
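Since the moot's `metadata` is fully determined by the account and the domain, deriving a feed ID requires no signature at all. A sketch reusing the hypothetical `getMsgID()` from the Terminology section:
```js
// Any peer can rebuild a moot's metadata from public information alone
function mootMetadata(accountID, domain) {
  return {
    account: accountID,
    accountTips: null,
    dataHash: null,
    dataSize: 0,
    domain,
    tangles: {},
    v: 4,
  }
}

// Feed ID = Msg ID of the moot
function feedIDFor(accountID, domain) {
  return getMsgID({ metadata: mootMetadata(accountID, domain) })
}
```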
## Prev links
A msg can refer to 0 or more prev msgs. The prev links are used to build the tangle.
The `prev` array for a tangle should list:
- All current "tips" (msgs that are not yet listed inside any `prev`) of this tangle
- All msgs that are at the previous "lipmaa" depth relative to this `depth`
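The spec does not reproduce the lipmaa function itself; for illustration, this is the standard lipmaa link sequence (from Lipmaa's certificate-pool scheme, as also used in bamboo), mapping a depth to the earlier depth it should additionally link back to:
```js
// e.g. lipmaa(4) === 1, lipmaa(5) === 4, lipmaa(13) === 4
function lipmaa(n) {
  let m = 1
  let po3 = 3
  let u = n
  // find the smallest m of the form (3^k - 1) / 2 with m >= n
  while (m < n) {
    po3 *= 3
    m = (po3 - 1) / 2
  }
  po3 /= 3
  if (m !== n) {
    while (u !== 0) {
      m = (po3 - 1) / 2
      po3 /= 3
      u = u % m
    }
    if (m !== po3) po3 = m
  }
  return n - po3
}
```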
## JSON serialization
Whenever we need to serialize any JSON in the context of creating a Msg V4 message, we follow the "JSON Canonicalization Scheme" (JSC) defined by [RFC 8785](https://tools.ietf.org/html/rfc8785).
A serialized msg must not be larger than 65535 UTF-8 bytes.
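Both rules in this section fit in one small helper; a sketch assuming the `json-canon` package:
```js
const jsonCanon = require('json-canon')

// RFC 8785 canonical serialization plus the 65535-byte size limit
function serializeMsg(msg) {
  const json = jsonCanon(msg)
  const size = Buffer.byteLength(json, 'utf8')
  if (size > 65535) throw new Error(`msg too large: ${size} bytes`)
  return json
}
```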
# Msg V3
Background: https://github.com/ssbc/ssb2-discussion-forum/issues/24
## Terminology
- **Msg** = `{data,metadata,pubkey,sig}` published by a peer
- **Msg ID** = `hash(msg.metadata)`
- **Tangle** = a single-root DAG of msgs that can be replicated by peers
- **Tangle Root** = the origin msg of a tangle
- **Tangle Tips** = tangle msgs that are not yet referenced by any other msg in the tangle
- **Tangle ID** = Msg hash of the tangle's root msg
- **Account tangle** = tangle with msgs that add (or remove) asymmetric-crypto public keys
- **Account ID** = tangle ID of the account tangle
- **Feed** = tangle with msgs authored by (any pubkey in) an account
- **Feed root** = a msg that is deterministically predictable and empty, so as to allow others to pre-know its hash
- **Feed ID** = ID of a feed (Msg ID of the feed's root msg)
JSON
```typescript
interface Msg {
data: Record<string, any> | string | null // a plaintext object, or ciphertext string, or null
metadata: {
account: string | 'self' | 'any' // blake3 hash of an account tangle root msg, or the string 'self', or 'any'
accountTips: Array<string> | null // list of blake3 hashes of account tangle tips, or null
dataHash: DataHash | null // blake3 hash of the `data` object serialized
dataSize: number // byte size (unsigned integer) of the `data` object serialized
domain: string // alphanumeric string, at least 3 chars, max 100 chars
tangles: {
// for each tangle this msg belongs to, identified by the tangle's root
[rootMsgHash: string]: {
depth: number // maximum distance (positive integer) from this msg to the root
prev: Array<MsgHash> // list of msg hashes of existing msgs, unique set and ordered alphabetically
}
}
v: 3 // hard-coded at 3, indicates the version of the feed format
}
pubkey: Pubkey // base58 encoded string for the author's public key
sig: Signature // base58 encoded string of the signature of the UTF8 string ":msg-v3:<METADATA>" where `<METADATA>` is the msg.metadata object serialized
}
```
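The `sig` comment above pins down exactly which bytes are signed; a sketch of constructing them, assuming `json-canon`:
```js
const jsonCanon = require('json-canon')

// UTF-8 bytes of ":msg-v3:" followed by the canonically serialized metadata
function v3SignableBytes(metadata) {
  return Buffer.from(':msg-v3:' + jsonCanon(metadata), 'utf8')
}
```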
**Depth:** we NEED this field because it is the most reliable way of calculating lipmaa distances between msgs in the face of sliced replication. For example, since older msgs (except the certificate pool) may have been deleted, a "graph depth" recalculated from whatever happens to be stored locally could change over time; the explicit `depth` field keeps that calculation stable and deterministic.
## Account tangle msgs
Msgs in an account tangle are special because they have empty `account` and `accountTips` fields.
```typescript
interface Msg {
data: AccountData
metadata: {
account: 'self' // MUST be the string 'self'
accountTips: null // MUST be null
dataHash: DataHash
dataSize: number
domain: string // alphanumeric string, must start with "account__"
tangles: {
[accountTangleID: string]: {
depth: number // maximum distance (positive integer) from this msg to the root
prev: Array<MsgHash> // list of msg hashes of existing msgs, unique set and ordered alphabetically
}
}
v: 3
}
pubkey: Pubkey
sig: Signature
}
type AccountData = AccountAdd | AccountDel
// (if key is sig) "add" means this key can validly add more keys to the account
// (if key is sig) "del" means this key can validly revoke keys from the account
// (if key is shs) "internal-encryption" means this peer can get symmetric key
// (if key is shs) "external-encryption" means this peer can get asymmetric key
type AccountPower = 'add' | 'del' | 'internal-encryption' | 'external-encryption'
type AccountAdd = {
action: 'add'
key: Key
nonce?: string // nonce required only on the account tangle's root
consent?: string // base58 encoded signature of the string `:account-add:<ID>` where `<ID>` is the account's ID, required only on non-root msgs
accountPowers?: Array<AccountPower> // list of powers granted to this key, defaults to []
}
type AccountDel = {
action: 'del'
key: Key
}
type Key =
| {
purpose: 'shs-and-sig' // secret-handshake and digital signatures
algorithm: 'ed25519' // libsodium crypto_sign_detached
bytes: string // base58 encoded string for the public key
}
| {
purpose: 'external-encryption' // asymmetric encryption
algorithm: 'x25519-xsalsa20-poly1305' // libsodium crypto_box_easy
bytes: string // base58 encoded string of the public key
}
| {
purpose: 'sig' // digital signatures only
algorithm: 'ed25519' // libsodium crypto_sign_detached
bytes: string // base58 encoded string for the public key
}
```
Examples of `AccountData`:
- Registering the first signing pubkey:
```json
{
"action": "add",
"key": {
"purpose": "shs-and-external-signature",
"algorithm": "ed25519",
"bytes": "3JrJiHEQzRFMzEqWawfBgq2DSZDyihP1NHXshqcL8pB9"
},
"nonce": "6GHR1ZFFSB3C5qAGwmSwVH8f7byNo8Cqwn5PcyG3qDvS"
}
```
- Revoking a signing pubkey:
```json
{
"action": "del",
"key": {
"purpose": "shs-and-external-signature",
"algorithm": "ed25519",
"bytes": "3JrJiHEQzRFMzEqWawfBgq2DSZDyihP1NHXshqcL8pB9"
}
}
```
## Feed root
The root msg for a feed is special, its `metadata` is predictable and can be constructed by any peer. It is a data-less msg with the following shape:
```typescript
interface Msg {
data: null // MUST be null
metadata: {
dataHash: null // MUST be null
dataSize: 0 // MUST be 0
account: string // MUST be an ID
accountTips: null // MUST be null
tangles: {} // MUST be empty object
domain: string
v: 3
}
pubkey: Pubkey
sig: Signature
}
```
Thus, given an `account` and a `domain`, any peer can construct the `metadata` part of the feed root msg, and thus derive the "msg ID" for the root based on that `metadata`.
Given the root msg ID, any peer can thus refer to the feed tangle, because the root msg ID is the tangle ID for the feed tangle.
Note also that _any peer_ can construct the root msg and sign it, which renders the signatures for feed roots meaningless and ignorable.
## Prev links
A msg can refer to 0 or more prev msgs. The prev links are used to build the tangle.
The `prev` array for a tangle should list:
- All current "tips" (msgs that are not yet listed inside any `prev`) of this tangle
- All msgs that are at the previous "lipmaa" depth relative to this `depth`
## JSON serialization
Whenever we need to serialize any JSON in the context of creating a Msg V3 message, we follow the "JSON Canonicalization Scheme" (JSC) defined by [RFC 8785](https://tools.ietf.org/html/rfc8785).
A serialized msg must not be larger than 65535 UTF-8 bytes.
# Msg V2
Background: https://github.com/ssbc/ssb2-discussion-forum/issues/24
## Terminology
- **Msg** = published data that is signed and shareable
- **Msg ID** = hash(msg.metadata)
- **Tangle** = any single-root DAG of msgs that can be replicated by peers
- **Tangle Root** = the origin msg of a tangle
- **Tangle Tips** = tangle msgs that are not yet referenced by any other msg in the tangle
- **Tangle ID** = Msg ID of the tangle's root msg
- **Identity tangle** = tangle with msgs that add (or remove?) public keys used for signing msgs
- **Group** = (mutable) set of public keys, implemented by an identity tangle
- **Group ID** = ID of an identity tangle (Msg Id of the identity tangle's root msg)
- **Feed** = tangle with msgs authored by any pubkey in a group
- **Feed root** = a msg that is deterministically predictable and empty, so as to allow others to pre-know its hash
- **Feed ID** = ID of a feed (Msg ID of the feed's root msg)
JSON
```typescript
interface Msg {
data: any | null // any object, or null
metadata: {
dataHash: ContentHash | null // blake3 hash of the `data` object serialized
dataSize: number // byte size (unsigned integer) of the `data` object serialized
group: string | null // blake3 hash of a group tangle root msg, or null
groupTips: Array<string> | null // list of blake3 hashes of group tangle tips, or null
tangles: {
// for each tangle this msg belongs to, identified by the tangle's root
[rootMsgHash: string]: {
depth: number // maximum distance (positive integer) from this msg to the root
prev: Array<MsgHash> // list of msg hashes of existing msgs, unique set and ordered alphabetically
}
}
type: string // alphanumeric string, at least 3 chars, max 100 chars
v: 2 // hard-coded at 2, indicates the version of the feed format
}
pubkey: Pubkey // base58 encoded string for the author's public key
sig: Signature // Signs the `metadata` object
}
```
## Identity tangle msgs
Msgs in an identity tangle are special because they have empty `group` and `groupTips` fields.
```typescript
interface Msg {
data: {
add: string // pubkey being added to the group
nonce?: string // nonce required only on the identity tangle's root
}
metadata: {
dataHash: ContentHash
dataSize: number
group: null // MUST be null
groupTips: null // MUST be null
tangles: {
[identityTangleId: string]: {
depth: number // maximum distance (positive integer) from this msg to the root
prev: Array<MsgHash> // list of msg hashes of existing msgs, unique set and ordered alphabetically
}
}
type: 'group' // MUST be 'group'
v: 2
}
pubkey: Pubkey
sig: Signature
}
```
## Feed root
The root msg for a feed is special, its `metadata` is predictable and can be constructed by any peer. It is a data-less msg with the following shape:
```typescript
interface Msg {
data: null // MUST be null
metadata: {
dataHash: null // MUST be null
dataSize: 0 // MUST be 0
group: string // MUST be a group ID
groupTips: null // MUST be null
tangles: {} // MUST be empty object
type: string
v: 2
}
pubkey: Pubkey
sig: Signature
}
```
Thus, given a `group` and a `type`, any peer can construct the `metadata` part of the feed root msg, and thus can derive the "msg ID" for the root based on that `metadata`.
Given the root msg ID, any peer can thus refer to the feed tangle, because the root msg ID is the tangle ID for the feed tangle.
Note also that _any peer_ can construct the root msg and sign it, which renders the signatures for feed roots meaningless and ignorable.
## Prev links
A msg can refer to 0 or more prev msgs. The prev links are used to build the tangle.
The `prev` array for a tangle should list:
- All current "tips" (msgs that are not yet listed inside any `prev`) of this tangle
- All msgs that are at the previous "lipmaa" depth relative to this `depth`
## JSON serialization
Whenever we need to serialize any JSON in the context of creating a Msg V2 message, we follow the "JSON Canonicalization Scheme" (JSC) defined by [RFC 8785](https://tools.ietf.org/html/rfc8785).
# Feed V1
JSON
```typescript
interface Msg {
content: any | null // any object, or null
metadata: {
hash: ContentHash // blake3 hash of the `content` object serialized
size: number // byte size (unsigned integer) of the `content` object serialized
tangles: {
// for each tangle this msg belongs to, identified by the tangle's root
[rootMsgHash: string]: {
depth: number // maximum distance (positive integer) from this msg to the root
prev: Array<MsgHash> // list of msg hashes of existing msgs, unique set and ordered alphabetically
}
}
type: string // alphanumeric string, at least 3 chars, max 100 chars
v: 1 // hard-coded at 1, indicates the version of the feed format
who: Pubkey // base58 encoded string for the author's public key
}
sig: Signature // Signs the `metadata` object
}
```

test/account-add.test.js

@@ -0,0 +1,292 @@
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const p = require('node:util').promisify
const os = require('node:os')
const rimraf = require('rimraf')
const Keypair = require('pzp-keypair')
const { createPeer } = require('./util')
const MsgV4 = require('../lib/msg-v4')
const DIR = path.join(os.tmpdir(), 'pzp-db-account-add')
rimraf.sync(DIR)
test('account.add()', async (t) => {
await t.test('Basic usage', async (t) => {
const keypair1 = Keypair.generate('ed25519', 'alice')
const keypair2 = Keypair.generate('ed25519', 'bob')
const peer = createPeer({ keypair: keypair1, path: DIR })
await peer.db.loaded()
const account = await p(peer.db.account.create)({
keypair: keypair1,
subdomain: 'person',
})
assert.equal(
await p(peer.db.account.has)({ account, keypair: keypair2 }),
false
)
const consent = peer.db.account.consent({ account, keypair: keypair2 })
const accountRec1 = await p(peer.db.account.add)({
account,
keypair: keypair2,
consent,
powers: ['external-encryption'],
})
assert.ok(accountRec1, 'accountRec1 exists')
const { id, msg } = accountRec1
assert.ok(id, 'id exists')
assert.deepEqual(
msg.data,
{
action: 'add',
key: {
purpose: 'sig',
algorithm: 'ed25519',
bytes: keypair2.public,
},
consent,
powers: ['external-encryption'],
},
'msg.data.add NEW KEY'
)
assert.equal(msg.metadata.account, 'self', 'msg.metadata.account')
assert.equal(msg.metadata.accountTips, null, 'msg.metadata.accountTips')
assert.equal(msg.metadata.domain, 'account__person', 'msg.metadata.domain')
assert.deepEqual(
msg.metadata.tangles,
{ [account]: { depth: 1, prev: [account] } },
'msg.metadata.tangles'
)
assert.equal(msg.sigkey, keypair1.public, 'msg.sigkey OLD KEY')
assert.equal(
await p(peer.db.account.has)({ account, keypair: keypair2 }),
true
)
await p(peer.close)()
})
await t.test('keypair with no "add" powers cannot add', async (t) => {
rimraf.sync(DIR)
const keypair1 = Keypair.generate('ed25519', 'alice')
const keypair2 = Keypair.generate('ed25519', 'bob')
const keypair3 = Keypair.generate('ed25519', 'carol')
const peer1 = createPeer({ keypair: keypair1, path: DIR })
await peer1.db.loaded()
const id = await p(peer1.db.account.create)({
keypair: keypair1,
subdomain: 'account',
})
const msg1 = await p(peer1.db.get)(id)
const { msg: msg2 } = await p(peer1.db.account.add)({
account: id,
keypair: keypair2,
powers: [],
})
assert.equal(msg2.data.key.bytes, keypair2.public)
assert.equal(
await p(peer1.db.account.has)({ account: id, keypair: keypair2 }),
true
)
await p(peer1.close)()
rimraf.sync(DIR)
const peer2 = createPeer({ keypair: keypair2, path: DIR })
await peer2.db.loaded()
await p(peer2.db.add)(msg1, id)
await p(peer2.db.add)(msg2, id)
// Test author-side power validation
await assert.rejects(
p(peer2.db.account.add)({
account: id,
keypair: keypair3,
powers: [],
}),
/signing keypair does not have the "add" power/
)
// Make the author disobey power validation
const { msg: msg3 } = await p(peer2.db.account.add)({
account: id,
keypair: keypair3,
powers: [],
_disobey: true,
})
assert.equal(msg3.data.key.bytes, keypair3.public)
await p(peer2.close)()
rimraf.sync(DIR)
const peer1again = createPeer({ keypair: keypair1, path: DIR })
await peer1again.db.loaded()
await p(peer1again.db.add)(msg1, id) // re-add because lost during rimraf
await p(peer1again.db.add)(msg2, id) // re-add because lost during rimraf
// Test replicator-side power validation
await assert.rejects(
p(peer1again.db.add)(msg3, id),
/add\(\) failed to verify msg/
)
await p(peer1again.close)()
})
await t.test('publish with a key in the account', async (t) => {
rimraf.sync(DIR)
const keypair1 = Keypair.generate('ed25519', 'alice')
const keypair2 = Keypair.generate('ed25519', 'bob')
let peer = createPeer({ keypair: keypair1, path: DIR })
await peer.db.loaded()
const account = await p(peer.db.account.create)({
keypair: keypair1,
subdomain: 'person',
})
const accountMsg0 = await p(peer.db.get)(account)
// Consent is implicitly created because keypair2 has .private
const accountRec1 = await p(peer.db.account.add)({
account,
keypair: keypair2,
})
const postRec = await p(peer.db.feed.publish)({
account,
domain: 'post',
data: { text: 'hello' },
keypair: keypair2,
})
assert.equal(postRec.msg.data.text, 'hello', 'post text correct')
const mootRec = await p(peer.db.feed.findMoot)(account, 'post')
assert.ok(mootRec, 'posts moot exists')
const recs = []
for await (const rec of peer.db.records()) {
recs.push(rec)
}
assert.equal(recs.length, 4, '4 records')
const [_accountRec0, _accountRec1, postsRoot, _post] = recs
assert.deepEqual(_accountRec0.msg, accountMsg0, 'accountMsg0')
assert.deepEqual(_accountRec1.msg, accountRec1.msg, 'accountMsg1')
assert.deepEqual(
postsRoot.msg.metadata,
{
dataHash: null,
dataSize: 0,
account,
accountTips: null,
tangles: {},
domain: 'post',
v: 4,
},
'postsRoot'
)
assert.deepEqual(_post.msg, postRec.msg, 'postMsg')
await p(peer.close)()
// Re-load as Carol, add the msgs to validate them
rimraf.sync(DIR)
const keypair3 = Keypair.generate('ed25519', 'carol')
const carol = createPeer({ keypair: keypair3, path: DIR })
await carol.db.loaded()
await p(carol.db.add)(accountMsg0, account)
await p(carol.db.add)(accountRec1.msg, account)
await p(carol.db.add)(postsRoot.msg, mootRec.id)
await p(carol.db.add)(postRec.msg, mootRec.id)
// t.pass('carol added all msgs successfully')
await p(carol.close)()
})
await t.test(
"Can't publish with a key if the key has been del'd",
async () => {
rimraf.sync(DIR)
const keypair1 = Keypair.generate('ed25519', 'alice')
const keypair2 = Keypair.generate('ed25519', 'bob')
let peer = createPeer({ keypair: keypair1, path: DIR })
await peer.db.loaded()
const account = await p(peer.db.account.create)({
keypair: keypair1,
subdomain: 'person',
})
const accountMsg0 = await p(peer.db.get)(account)
const consent = peer.db.account.consent({ account, keypair: keypair2 })
const accountRec1 = await p(peer.db.account.add)({
account,
keypair: keypair2,
consent,
powers: ['external-encryption'],
})
const goodRec = await p(peer.db.feed.publish)({
account,
domain: 'post',
data: { text: 'potato' },
keypair: keypair2,
})
const postMootRec = await p(peer.db.feed.findMoot)(account, 'post')
const delRec = await p(peer.db.account.del)({
account,
keypair: keypair2,
})
const badRec = await p(peer.db.feed.publish)({
account,
domain: 'post',
data: { text: 'potato2' },
keypair: keypair2,
})
// Re-load as Carol, add the msgs to validate them
rimraf.sync(DIR)
const keypair3 = Keypair.generate('ed25519', 'carol')
const carol = createPeer({ keypair: keypair3, path: DIR })
await carol.db.loaded()
await p(carol.db.add)(accountMsg0, account)
await p(carol.db.add)(accountRec1.msg, account)
await p(carol.db.add)(postMootRec.msg, postMootRec.id)
await p(carol.db.add)(goodRec.msg, postMootRec.id)
await p(carol.db.add)(delRec.msg, account)
await assert.rejects(
p(carol.db.add)(badRec.msg, postMootRec.id),
/add\(\) failed to verify msg/,
"Adding msg with del'd keypair is supposed to fail"
)
await p(carol.close)()
}
)
})

test/account-create.test.js

@@ -0,0 +1,145 @@
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const Keypair = require('pzp-keypair')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'pzp-db-account-create')
rimraf.sync(DIR)
test('account.create() ', async (t) => {
await t.test('create with just "domain"', async (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
const account = await p(peer.db.account.create)({
subdomain: 'person',
_nonce: 'MYNONCE',
})
assert.ok(account, 'accountRec0 exists')
const msg = await p(peer.db.get)(account)
assert.deepEqual(
msg.data,
{
action: 'add',
key: {
purpose: 'shs-and-sig',
algorithm: 'ed25519',
bytes: keypair.public,
},
nonce: 'MYNONCE',
powers: ['add', 'del', 'external-encryption', 'internal-encryption'],
},
'msg.data'
)
assert.equal(msg.metadata.account, 'self', 'msg.metadata.account')
assert.equal(msg.metadata.accountTips, null, 'msg.metadata.accountTips')
assert.deepEqual(
Object.keys(msg.metadata.tangles),
[],
'msg.metadata.tangles'
)
assert.equal(msg.sigkey, keypair.public, 'msg.sigkey')
await p(peer.close)()
})
await t.test('create with "keypair" and "domain"', async (t) => {
rimraf.sync(DIR)
const keypair = Keypair.generate('ed25519', 'alice')
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
const account = await p(peer.db.account.create)({
keypair,
subdomain: 'person',
})
assert.ok(account, 'account created')
const msg = await p(peer.db.get)(account)
assert.equal(msg.data.key.bytes, keypair.public, 'msg.data')
assert.equal(msg.metadata.account, 'self', 'msg.metadata.account')
assert.equal(msg.metadata.accountTips, null, 'msg.metadata.accountTips')
assert.deepEqual(
Object.keys(msg.metadata.tangles),
[],
'msg.metadata.tangles'
)
assert.equal(msg.sigkey, keypair.public, 'msg.sigkey')
await p(peer.close)()
})
await t.test('account.find() can find', async (t) => {
rimraf.sync(DIR)
const keypair = Keypair.generate('ed25519', 'alice')
const subdomain = 'person'
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
const account = await p(peer.db.account.create)({ keypair, subdomain })
assert.ok(account, 'account created')
const found = await p(peer.db.account.find)({ keypair, subdomain })
assert.equal(found, account, 'found')
await p(peer.close)()
})
await t.test('account.findOrCreate() can find', async (t) => {
rimraf.sync(DIR)
const keypair = Keypair.generate('ed25519', 'alice')
const subdomain = 'person'
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
const account = await p(peer.db.account.create)({ keypair, subdomain })
assert.ok(account, 'account created')
const found = await p(peer.db.account.findOrCreate)({ keypair, subdomain })
assert.equal(found, account, 'found')
await p(peer.close)()
})
await t.test('account.findOrCreate() can create', async (t) => {
rimraf.sync(DIR)
const keypair = Keypair.generate('ed25519', 'alice')
const subdomain = 'person'
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
let gotError = false
await p(peer.db.account.find)({ keypair, subdomain }).catch((err) => {
assert.equal(err.cause, 'ENOENT')
gotError = true
})
assert.ok(gotError, 'account not found')
const account = await p(peer.db.account.findOrCreate)({
keypair,
subdomain,
})
assert.ok(account, 'account created')
const msg = await p(peer.db.get)(account)
assert.equal(msg.data.key.bytes, keypair.public, 'msg.data')
assert.equal(msg.metadata.account, 'self', 'msg.metadata.account')
assert.equal(msg.metadata.accountTips, null, 'msg.metadata.accountTips')
assert.deepEqual(
Object.keys(msg.metadata.tangles),
[],
'msg.metadata.tangles'
)
assert.equal(msg.sigkey, keypair.public, 'msg.sigkey')
await p(peer.close)()
})
})


@@ -1,44 +1,139 @@
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const Keypair = require('pzp-keypair')
const MsgV4 = require('../lib/msg-v4')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'pzp-db-add')
rimraf.sync(DIR)
test('add()', async (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
const accountMsg0 = MsgV4.createAccount(keypair, 'person', 'aliceNonce')
const id = MsgV4.getMsgID(accountMsg0)
await t.test('basic use case', async () => {
// Moot can be added without validating its account & sigkey
const moot = MsgV4.createMoot(id, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const recMoot = await p(peer.db.add)(moot, mootID)
assert.equal(recMoot.msg.metadata.dataSize, 0, 'moot added')
await p(peer.db.add)(accountMsg0, id)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(recMoot.id, recMoot.msg)
const inputMsg = MsgV4.create({
keypair,
domain: 'post',
data: { text: 'This is the first post!' },
account: id,
accountTips: [id],
tangles: {
[mootID]: tangle,
},
})
const rec = await p(peer.db.add)(inputMsg, null) // tangleID implicit
assert.equal(rec.msg.data.text, 'This is the first post!')
const stats = await p(peer.db.log.stats)()
assert.deepEqual(stats, { totalBytes: 1662, deletedBytes: 0 })
})
await t.test('concurrent add of the same msg appends just one', async () => {
const moot = MsgV4.createMoot(id, 'whatever', keypair)
const mootID = MsgV4.getMsgID(moot)
await Promise.all([
p(peer.db.add)(moot, mootID),
p(peer.db.add)(moot, mootID),
])
const stats = await p(peer.db.log.stats)()
assert.deepEqual(stats, { totalBytes: 2072, deletedBytes: 0 })
})
await t.test('dataful msg replacing a dataless msg', async (t) => {
const moot = MsgV4.createMoot(id, 'something', keypair)
const mootID = MsgV4.getMsgID(moot)
await p(peer.db.add)(moot, mootID)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg1Dataful = MsgV4.create({
keypair,
account: id,
accountTips: [id],
domain: 'something',
data: { text: 'first' },
tangles: {
[mootID]: tangle,
},
})
const msg1Dataless = { ...msg1Dataful, data: null }
const msg1ID = MsgV4.getMsgID(msg1Dataful)
tangle.add(msg1ID, msg1Dataful)
const msg2 = MsgV4.create({
keypair,
account: id,
accountTips: [id],
domain: 'something',
data: { text: 'second' },
tangles: {
[mootID]: tangle,
},
})
const msg2ID = MsgV4.getMsgID(msg2)
await p(peer.db.add)(msg1Dataless, mootID)
await p(peer.db.add)(msg2, mootID)
// We expect there to be 3 msgs: moot, dataless msg1, dataful msg2
{
const ids = []
const texts = []
for await (const rec of peer.db.records()) {
if (rec.msg.metadata.domain === 'something') {
ids.push(rec.id)
texts.push(rec.msg.data?.text)
}
}
assert.deepEqual(ids, [mootID, msg1ID, msg2ID])
assert.deepEqual(texts, [undefined, undefined, 'second'])
const stats = await p(peer.db.log.stats)()
assert.deepEqual(stats, { totalBytes: 3718, deletedBytes: 0 })
}
await p(peer.db.add)(msg1Dataful, mootID)
// We expect there to be 3 msgs: moot, (deleted) dataless msg1, dataful msg2
// and dataful msg1 appended at the end
{
const ids = []
const texts = []
for await (const rec of peer.db.records()) {
if (rec.msg.metadata.domain === 'something') {
ids.push(rec.id)
texts.push(rec.msg.data?.text)
}
}
assert.deepEqual(ids, [mootID, msg2ID, msg1ID])
assert.deepEqual(texts, [undefined, 'second', 'first'])
const stats = await p(peer.db.log.stats)()
assert.deepEqual(stats, { totalBytes: 4340, deletedBytes: 610 })
}
})
await p(peer.close)(true)
})


@@ -1,133 +0,0 @@
const test = require('tape')
const path = require('path')
const os = require('os')
const rimraf = require('rimraf')
const SecretStack = require('secret-stack')
const caps = require('ssb-caps')
const p = require('util').promisify
const FeedV1 = require('../lib/feed-v1')
const { generateKeypair } = require('./util')
const DIR = path.join(os.tmpdir(), 'ppppp-db-create')
rimraf.sync(DIR)
const keys = generateKeypair('alice')
const bobKeys = generateKeypair('bob')
let peer
test('setup', async (t) => {
peer = SecretStack({ appKey: caps.shs })
.use(require('../lib'))
.use(require('ssb-box'))
.call(null, { keys, path: DIR })
await peer.db.loaded()
})
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
let msgHash1
let rec1
let msgHash2
test('create()', async (t) => {
rec1 = await p(peer.db.create)({
type: 'post',
content: { text: 'I am 1st post' },
})
t.equal(rec1.msg.content.text, 'I am 1st post', 'msg1 text correct')
t.equal(
rec1.msg.metadata.tangles[rootHash].depth,
1,
'msg1 tangle depth correct'
)
t.deepEquals(
rec1.msg.metadata.tangles[rootHash].prev,
[rootHash],
'msg1 tangle prev correct'
)
msgHash1 = FeedV1.getMsgHash(rec1.msg)
const rec2 = await p(peer.db.create)({
type: 'post',
content: { text: 'I am 2nd post' },
})
t.equal(rec2.msg.content.text, 'I am 2nd post', 'msg2 text correct')
t.equal(
rec2.msg.metadata.tangles[rootHash].depth,
2,
'msg2 tangle depth correct'
)
t.deepEquals(
rec2.msg.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2 tangle prev correct'
)
msgHash2 = FeedV1.getMsgHash(rec2.msg)
})
test('add() forked then create() merged', async (t) => {
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
tangle.add(rec1.hash, rec1.msg)
const msg3 = FeedV1.create({
keys,
type: 'post',
content: { text: '3rd post forked from 1st' },
tangles: {
[rootHash]: tangle,
},
})
const rec3 = await p(peer.db.add)(msg3, rootHash)
const msgHash3 = FeedV1.getMsgHash(rec3.msg)
const rec4 = await p(peer.db.create)({
type: 'post',
content: { text: 'I am 4th post' },
})
t.ok(rec4, '4th post created')
t.deepEquals(
rec4.msg.metadata.tangles[rootHash].prev,
[rootHash, msgHash2, msgHash3],
'msg4 prev is root, msg2 and msg3'
)
})
test('create() encrypted with box', async (t) => {
const recEncrypted = await p(peer.db.create)({
type: 'post',
content: { text: 'I am chewing food', recps: [peer.id] },
encryptionFormat: 'box',
})
t.equal(typeof recEncrypted.msg.content, 'string')
t.true(recEncrypted.msg.content.endsWith('.box'), '.box')
const msgDecrypted = peer.db.get(recEncrypted.hash)
t.equals(msgDecrypted.content.text, 'I am chewing food')
})
test('create() with tangles', async (t) => {
const recA = await p(peer.db.create)({
type: 'comment',
content: { text: 'I am root' },
})
t.equal(recA.msg.content.text, 'I am root', 'root text correct')
const recB = await p(peer.db.create)({
type: 'comment',
content: { text: 'I am comment 1' },
tangles: [recA.hash],
keys: bobKeys,
})
t.equal(recB.msg.metadata.tangles[recA.hash].depth, 1, 'tangle depth 1')
t.deepEquals(
recB.msg.metadata.tangles[recA.hash].prev,
[recA.hash],
'tangle prev'
)
})
test('teardown', (t) => {
peer.close(t.end)
})


@@ -1,53 +1,91 @@
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const Log = require('../lib/log')
const Keypair = require('pzp-keypair')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'pzp-db-del')
rimraf.sync(DIR)
test('del()', async (t) => {
const peer = createPeer({
keypair: Keypair.generate('ed25519', 'alice'),
path: DIR,
})
await peer.db.loaded()
const id = await p(peer.db.account.create)({
subdomain: 'person',
_nonce: 'alice',
})
const msgIDs = []
for (let i = 0; i < 5; i++) {
const rec = await p(peer.db.feed.publish)({
account: id,
domain: 'post',
data: { text: 'm' + i },
})
msgIDs.push(rec.id)
}
{
const texts = []
for await (const msg of peer.db.msgs()) {
if (msg.data && msg.metadata.account?.length > 4) {
texts.push(msg.data.text)
}
}
assert.deepEqual(
texts,
['m0', 'm1', 'm2', 'm3', 'm4'],
'msgs before the delete'
)
}
const stats1 = await p(peer.db.log.stats)()
assert.deepEqual(
stats1,
{ totalBytes: 4158, deletedBytes: 0 },
'stats before delete and compact'
)
await p(peer.db.del)(msgIDs[2])
await p(peer.db.del)(msgIDs[3])
{
const texts = []
for await (const msg of peer.db.msgs()) {
if (msg.data && msg.metadata.account?.length > 4) {
texts.push(msg.data.text)
}
}
assert.deepEqual(texts, ['m0', 'm1', 'm4'], 'msgs after the delete')
}
await p(peer.db.log.compact)()
assert('compacted')
// Delete 4 so we can test that its log offset was updated post-compaction
await p(peer.db.del)(msgIDs[4])
{
const texts = []
for await (const msg of peer.db.msgs()) {
if (msg.data && msg.metadata.account?.length > 4) {
texts.push(msg.data.text)
}
}
assert.deepEqual(texts, ['m0', 'm1'], 'msgs when deleted after compacted')
}
await p(peer.close)(true)
const log = Log(path.join(DIR, 'db', 'log'), {
cacheSize: 1,
blockSize: 64 * 1024,
codec: {
encode(msg) {
return Buffer.from(JSON.stringify(msg), 'utf8')
},
decode(buf) {
return JSON.parse(buf.toString('utf8'))
},
},
})
const persistedMsgs = await new Promise((resolve, reject) => {
let persistedMsgs = []
log.scan(
function drainEach(offset, rec, size) {
if (rec) {
persistedMsgs.push(rec.msg)
}
},
function drainEnd(err) {
if (err) return reject(err)
resolve(persistedMsgs)
}
)
})
const stats2 = await p(log.stats)()
assert.deepEqual(
stats2,
{ totalBytes: 2880, deletedBytes: 615 },
'stats after delete and compact'
)
assert.deepEqual(
persistedMsgs
.filter((msg) => msg.data && msg.metadata.account?.length > 4)
.map((msg) => msg.data.text),
['m0', 'm1'],
'msgs in disk after the delete'
)
})


@@ -1,95 +1,131 @@
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const Keypair = require('pzp-keypair')
const Log = require('../lib/log')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'pzp-db-erase')
rimraf.sync(DIR)
test('erase()', async (t) => {
const peer = createPeer({
keypair: Keypair.generate('ed25519', 'alice'),
path: DIR,
})
await peer.db.loaded()
const id = await p(peer.db.account.create)({
subdomain: 'person',
_nonce: 'alice',
})
const msgIDs = []
for (let i = 0; i < 5; i++) {
const rec = await p(peer.db.feed.publish)({
account: id,
domain: 'post',
data: { text: 'm' + i },
})
msgIDs.push(rec.id)
}
const SAVED_UPON_ERASE = '{"text":"m*"}'.length - 'null'.length
const before = []
for await (const msg of peer.db.msgs()) {
if (msg.data && msg.metadata.account?.length > 4) {
before.push(msg.data.text)
}
}
assert.deepEqual(
before,
['m0', 'm1', 'm2', 'm3', 'm4'],
'5 msgs before the erase'
)
const EXPECTED_TOTAL_BYTES = 4158
const stats1 = await p(peer.db.log.stats)()
assert.deepEqual(
stats1,
{ totalBytes: EXPECTED_TOTAL_BYTES, deletedBytes: 0 },
'stats before erase and compact'
)
await p(peer.db.erase)(msgIDs[2])
const after = []
for await (const msg of peer.db.msgs()) {
if (msg.data && msg.metadata.account?.length > 4) {
after.push(msg.data.text)
}
}
assert.deepEqual(after, ['m0', 'm1', 'm3', 'm4'], '4 msgs after the erase')
const after2 = []
for await (const msg of peer.db.msgs()) {
for (const tangleID in msg.metadata.tangles) {
after2.push(msg.metadata.tangles[tangleID].depth)
}
}
assert.deepEqual(after2, [1, 2, 3, 4, 5], '5 metadata exists after the erase')
await p(peer.db.log.compact)()
assert('compacted')
await p(peer.close)(true)
const log = Log(path.join(DIR, 'db', 'log'), {
cacheSize: 1,
blockSize: 64 * 1024,
codec: {
encode(msg) {
return Buffer.from(JSON.stringify(msg), 'utf8')
},
decode(buf) {
return JSON.parse(buf.toString('utf8'))
},
},
})
const persistedMsgs = await new Promise((resolve, reject) => {
let persistedMsgs = []
log.scan(
function drainEach(offset, rec, size) {
if (rec) {
persistedMsgs.push(rec.msg)
}
},
function drainEnd(err) {
if (err) return reject(err)
resolve(persistedMsgs)
}
)
})
const afterReopen = []
for (const msg of persistedMsgs) {
if (msg.data && msg.metadata.account?.length > 4) {
afterReopen.push(msg.data.text)
}
}
const stats2 = await p(log.stats)()
assert.deepEqual(
stats2,
{ totalBytes: EXPECTED_TOTAL_BYTES - SAVED_UPON_ERASE, deletedBytes: 0 },
'stats after erase and compact'
)
assert.deepEqual(
afterReopen,
['m0', 'm1', 'm3', 'm4'],
'4 msgs after the erase'
)
})


@@ -0,0 +1,30 @@
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const Keypair = require('pzp-keypair')
const MsgV4 = require('../lib/msg-v4')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'pzp-db-feed-find-moot')
rimraf.sync(DIR)
test('feed.findMoot()', async (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
const id = await p(peer.db.account.create)({ subdomain: 'person' })
const moot = MsgV4.createMoot(id, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
await p(peer.db.add)(moot, mootID)
const mootRec = await p(peer.db.feed.findMoot)(id, 'post')
assert.equal(mootRec.id, mootID, 'feed.findMoot() returns moot ID')
await p(peer.close)(true)
})

test/feed-get-id.test.js

@@ -0,0 +1,31 @@
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const Keypair = require('pzp-keypair')
const MsgV4 = require('../lib/msg-v4')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'pzp-db-feed-get-id')
rimraf.sync(DIR)
test('feed.getID()', async (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
const id = await p(peer.db.account.create)({ subdomain: 'person' })
const moot = MsgV4.createMoot(id, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
assert.equal(
peer.db.feed.getID(id, 'post'),
mootID,
'feed.getID() returns moot ID'
)
await p(peer.close)(true)
})

test/feed-publish.test.js

@@ -0,0 +1,158 @@
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const Keypair = require('pzp-keypair')
const MsgV4 = require('../lib/msg-v4')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'pzp-db-feed-publish')
rimraf.sync(DIR)
test('feed.publish()', async (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const bobKeypair = Keypair.generate('ed25519', 'bob')
let peer
let id
let moot
let mootID
// Setup
{
peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
id = await p(peer.db.account.create)({ subdomain: 'person' })
moot = MsgV4.createMoot(id, 'post', keypair)
mootID = MsgV4.getMsgID(moot)
}
let msgID1
let rec1
let msgID2
await t.test('can add new msgs to the db', async (t) => {
rec1 = await p(peer.db.feed.publish)({
account: id,
domain: 'post',
data: { text: 'I am 1st post' },
})
assert.equal(rec1.msg.data.text, 'I am 1st post', 'msg1 text correct')
assert.equal(
rec1.msg.metadata.tangles[mootID].depth,
1,
'msg1 tangle depth correct'
)
assert.deepEqual(
rec1.msg.metadata.tangles[mootID].prev,
[mootID],
'msg1 tangle prev correct'
)
msgID1 = MsgV4.getMsgID(rec1.msg)
const rec2 = await p(peer.db.feed.publish)({
account: id,
domain: 'post',
data: { text: 'I am 2nd post' },
})
assert.equal(rec2.msg.data.text, 'I am 2nd post', 'msg2 text correct')
assert.equal(
rec2.msg.metadata.tangles[mootID].depth,
2,
'msg2 tangle depth correct'
)
assert.deepEqual(
rec2.msg.metadata.tangles[mootID].prev,
[msgID1],
'msg2 tangle prev correct'
)
msgID2 = MsgV4.getMsgID(rec2.msg)
})
await t.test('merges tangle after a forked add()', async (t) => {
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
tangle.add(rec1.id, rec1.msg)
const msg3 = MsgV4.create({
keypair,
account: id,
accountTips: [id],
domain: 'post',
data: { text: '3rd post forked from 1st' },
tangles: {
[mootID]: tangle,
},
})
const rec3 = await p(peer.db.add)(msg3, mootID)
const msgID3 = MsgV4.getMsgID(rec3.msg)
const rec4 = await p(peer.db.feed.publish)({
account: id,
domain: 'post',
data: { text: 'I am 4th post' },
})
assert.ok(rec4, '4th post published')
assert.equal(
rec4.msg.metadata.tangles[mootID].prev.length,
3,
'msg4 prev has 3' // root, msg2 and msg3
)
assert.ok(
rec4.msg.metadata.tangles[mootID].prev.includes(mootID),
'msg4 prev has root'
)
assert.ok(
rec4.msg.metadata.tangles[mootID].prev.includes(msgID2),
'msg4 prev has msg2'
)
assert.ok(
rec4.msg.metadata.tangles[mootID].prev.includes(msgID3),
'msg4 prev has msg3'
)
})
await t.test('publish encrypted with box', async (t) => {
const recEncrypted = await p(peer.db.feed.publish)({
account: id,
domain: 'post',
data: { text: 'I am chewing food', recps: [keypair.public] },
encryptionFormat: 'box',
})
assert.equal(typeof recEncrypted.msg.data, 'string')
assert.ok(recEncrypted.msg.data.endsWith('.box'), '.box')
const msgDecrypted = await p(peer.db.get)(recEncrypted.id)
assert.equal(msgDecrypted.data.text, 'I am chewing food')
})
await t.test('publish with tangles', async (t) => {
const recA = await p(peer.db.feed.publish)({
account: id,
domain: 'comment',
data: { text: 'I am root' },
})
assert.equal(recA.msg.data.text, 'I am root', 'root text correct')
const recB = await p(peer.db.feed.publish)({
account: id,
domain: 'comment',
data: { text: 'I am comment 1' },
tangles: [recA.id],
keypair: bobKeypair,
})
assert.equal(recB.msg.metadata.tangles[recA.id].depth, 1, 'tangle depth 1')
assert.deepEqual(
recB.msg.metadata.tangles[recA.id].prev,
[recA.id],
'tangle prev'
)
})
await p(peer.close)(true)
})


@@ -1,198 +0,0 @@
const tape = require('tape')
const FeedV1 = require('../../lib/feed-v1')
const { generateKeypair } = require('../util')
let rootMsg = null
let rootHash = null
tape('FeedV1.createRoot()', (t) => {
const keys = generateKeypair('alice')
rootMsg = FeedV1.createRoot(keys, 'post')
t.equals(rootMsg.content, null, 'content')
t.equals(rootMsg.metadata.hash, null, 'hash')
t.equals(rootMsg.metadata.size, 0, 'size')
t.equals(rootMsg.metadata.type, 'post', 'type')
t.equals(rootMsg.metadata.who, FeedV1.stripAuthor(keys.id), 'who')
t.deepEquals(rootMsg.metadata.tangles, {}, 'tangles')
console.log(rootMsg)
rootHash = FeedV1.getMsgHash(rootMsg)
t.equals(rootHash, '3F26EgnwbMHm1EEeeVM1Eb', 'root hash')
t.end()
})
tape('FeedV1.create()', (t) => {
const keys = generateKeypair('alice')
const content = { text: 'Hello world!' }
const tangle1 = new FeedV1.Tangle(rootHash)
tangle1.add(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content,
type: 'post',
tangles: {
[rootHash]: tangle1,
},
})
t.deepEquals(
Object.keys(msg1.metadata),
['hash', 'size', 'tangles', 'type', 'v', 'who'],
'metadata fields'
)
t.equals(
msg1.metadata.who,
'4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW',
'metadata.who'
)
t.equals(msg1.metadata.type, 'post', 'metadata.type')
t.deepEquals(msg1.metadata.hash, '9R7XmBhHF5ooPg34j9TQcz', 'metadata.hash')
t.deepEquals(Object.keys(msg1.metadata.tangles), [rootHash], 'tangles')
t.equals(msg1.metadata.tangles[rootHash].depth, 1, 'tangle depth')
t.deepEquals(msg1.metadata.tangles[rootHash].prev, [rootHash], 'tangle prev')
t.deepEquals(msg1.metadata.size, 23, 'metadata.size')
t.deepEqual(msg1.content, content, 'content is correct')
console.log(JSON.stringify(msg1, null, 2))
const msgHash1 = 'MTYQM89hvHuiVKaw8Ze7kc'
t.equals(
FeedV1.getMsgId(msg1),
'ppppp:message/v1/4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW/post/' +
msgHash1,
'getMsgId'
)
const tangle2 = new FeedV1.Tangle(rootHash)
tangle2.add(rootHash, rootMsg)
tangle2.add(msgHash1, msg1)
const content2 = { text: 'Ola mundo!' }
const msg2 = FeedV1.create({
keys,
content: content2,
type: 'post',
tangles: {
[rootHash]: tangle2,
},
})
t.deepEquals(
Object.keys(msg2.metadata),
['hash', 'size', 'tangles', 'type', 'v', 'who'],
'metadata keys'
)
t.equals(
msg2.metadata.who,
'4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW',
'metadata.who'
)
t.equals(msg2.metadata.type, 'post', 'metadata.type')
t.deepEquals(Object.keys(msg1.metadata.tangles), [rootHash], 'tangles')
t.equals(msg2.metadata.tangles[rootHash].depth, 2, 'tangle depth')
t.deepEquals(msg2.metadata.tangles[rootHash].prev, [msgHash1], 'tangle prev')
t.deepEquals(msg2.metadata.hash, 'XuZEzH1Dhy1yuRMcviBBcN', 'metadata.hash')
t.deepEquals(msg2.metadata.size, 21, 'metadata.size')
t.deepEqual(msg2.content, content2, 'content is correct')
console.log(JSON.stringify(msg2, null, 2))
t.deepEqual(
FeedV1.getMsgId(msg2),
'ppppp:message/v1/4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW/post/T7juKvDH2bqEUhJB9Dxctr',
'getMsgId'
)
t.end()
})
tape('create() handles DAG tips correctly', (t) => {
const keys = generateKeypair('alice')
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content: { text: '1' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = FeedV1.getMsgHash(msg1)
t.deepEquals(
msg1.metadata.tangles[rootHash].prev,
[FeedV1.getFeedRootHash(keys.id, 'post')],
'msg1.prev is root'
)
tangle.add(msgHash1, msg1)
const msg2A = FeedV1.create({
keys,
content: { text: '2A' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
t.deepEquals(
msg2A.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2A.prev is msg1'
)
const msg2B = FeedV1.create({
keys,
content: { text: '2B' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash2B = FeedV1.getMsgHash(msg2B)
t.deepEquals(
msg2B.metadata.tangles[rootHash].prev,
[msgHash1],
'msg2B.prev is msg1'
)
tangle.add(msgHash2B, msg2B)
const msg3 = FeedV1.create({
keys,
content: { text: '3' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash3 = FeedV1.getMsgHash(msg3)
t.deepEquals(
msg3.metadata.tangles[rootHash].prev,
[rootHash, msgHash2B].sort(),
'msg3.prev is [root(lipmaa),msg2B(previous)], sorted'
)
tangle.add(msgHash3, msg3)
const msgHash2A = FeedV1.getMsgHash(msg2A)
tangle.add(msgHash2A, msg2A)
t.pass('msg2A comes into awareness')
const msg4 = FeedV1.create({
keys,
content: { text: '4' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
t.deepEquals(
msg4.metadata.tangles[rootHash].prev,
[msgHash3, msgHash2A].sort(),
'msg4.prev is [msg3(previous),msg2A(old fork as tip)], sorted'
)
t.end()
})


@@ -1,314 +0,0 @@
const tape = require('tape')
const base58 = require('bs58')
const FeedV1 = require('../../lib/feed-v1')
const { generateKeypair } = require('../util')
tape('invalid msg with non-array prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
msg.metadata.tangles[rootHash].prev = null
const msgHash = FeedV1.getMsgHash(msg)
const err = FeedV1.validate(msg, tangle, msgHash, rootHash)
t.ok(err, 'invalid 2nd msg throws')
t.match(err.message, /prev must be an array/, 'invalid 2nd msg description')
t.end()
})
tape('invalid msg with bad prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = FeedV1.getMsgHash(msg1)
tangle.add(msgHash1, msg1)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
msg2.metadata.tangles[rootHash].depth = 1
msg2.metadata.tangles[rootHash].prev = [1234]
const msgHash2 = FeedV1.getMsgHash(msg2)
const err = FeedV1.validate(msg2, tangle, msgHash2, rootHash)
t.ok(err, 'invalid 2nd msg throws')
t.match(
err.message,
/prev must contain strings/,
'invalid 2nd msg description'
)
t.end()
})
tape('invalid msg with URI in prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = FeedV1.getMsgHash(msg1)
tangle.add(msgHash1, msg1)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash2 = FeedV1.getMsgHash(msg2)
const randBuf = Buffer.alloc(16).fill(16)
const fakeMsgKey1 = `ppppp:message/v1/${base58.encode(randBuf)}`
msg2.metadata.tangles[rootHash].depth = 1
msg2.metadata.tangles[rootHash].prev = [fakeMsgKey1]
const err = FeedV1.validate(msg2, tangle, msgHash2, rootHash)
t.ok(err, 'invalid 2nd msg throws')
t.match(
err.message,
/prev must not contain URIs/,
'invalid 2nd msg description'
)
t.end()
})
tape('invalid msg with unknown prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = FeedV1.getMsgHash(msg1)
tangle.add(msgHash1, msg1)
const unknownMsg = FeedV1.create({
keys,
content: { text: 'Alien' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const unknownMsgHash = FeedV1.getMsgHash(unknownMsg)
const fakeRootHash = 'ABCDEabcde' + rootHash.substring(10)
const tangle2 = new FeedV1.Tangle(fakeRootHash)
tangle2.add(fakeRootHash, rootMsg)
tangle2.add(unknownMsgHash, unknownMsg)
const msg2 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: tangle2,
},
})
const msgHash2 = FeedV1.getMsgHash(msg2)
const err = FeedV1.validate(msg2, tangle, msgHash2, rootHash)
t.ok(err, 'invalid 2nd msg throws')
t.match(
err.message,
/all prev are locally unknown/,
'invalid 2nd msg description'
)
t.end()
})
tape('invalid feed msg with a different who', (t) => {
const keysA = generateKeypair('alice')
const keysB = generateKeypair('bob')
const rootMsg = FeedV1.createRoot(keysA, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const feedTangle = new FeedV1.Tangle(rootHash)
feedTangle.add(rootHash, rootMsg)
const msg = FeedV1.create({
keys: keysB,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: feedTangle,
},
})
const msgHash = FeedV1.getMsgHash(msg)
const err = FeedV1.validate(msg, feedTangle, msgHash, rootHash)
t.match(err.message, /who ".*" does not match feed who/, 'invalid feed msg')
t.end()
})
tape('invalid feed msg with a different type', (t) => {
const keysA = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keysA, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const feedTangle = new FeedV1.Tangle(rootHash)
feedTangle.add(rootHash, rootMsg)
const msg = FeedV1.create({
keys: keysA,
content: { text: 'Hello world!' },
type: 'comment',
tangles: {
[rootHash]: feedTangle,
},
})
const msgHash = FeedV1.getMsgHash(msg)
const err = FeedV1.validate(msg, feedTangle, msgHash, rootHash)
t.match(
err.message,
/type "comment" does not match feed type "post"/,
'invalid feed msg'
)
t.end()
})
tape('invalid feed msg with non-alphabetical prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content: { text: '1' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = FeedV1.getMsgHash(msg1)
const msg2 = FeedV1.create({
keys,
content: { text: '2' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash2 = FeedV1.getMsgHash(msg2)
tangle.add(msgHash1, msg1)
tangle.add(msgHash2, msg2)
const msg3 = FeedV1.create({
keys,
content: { text: '3' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash3 = FeedV1.getMsgHash(msg3)
let prevHashes = msg3.metadata.tangles[rootHash].prev
if (prevHashes[0] < prevHashes[1]) {
prevHashes = [prevHashes[1], prevHashes[0]]
} else {
prevHashes = [prevHashes[0], prevHashes[1]]
}
msg3.metadata.tangles[rootHash].prev = prevHashes
const err = FeedV1.validate(msg3, tangle, msgHash3, rootHash)
t.ok(err, 'invalid 3rd msg throws')
t.match(
err.message,
/prev must be sorted in alphabetical order/,
'invalid error message'
)
t.end()
})
tape('invalid feed msg with duplicate prev', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content: { text: '1' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = FeedV1.getMsgHash(msg1)
const [prevHash] = msg1.metadata.tangles[rootHash].prev
msg1.metadata.tangles[rootHash].prev = [prevHash, prevHash]
const err = FeedV1.validate(msg1, tangle, msgHash1, rootHash)
t.ok(err, 'invalid 1st msg throws')
t.match(
err.message,
/prev must be unique set/,
'invalid error message'
)
t.end()
})
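
Taken together, these tests specify the shape rules for prev. A minimal sketch of a validator enforcing them, with error strings matching the regexes asserted above (hypothetical, not the library's code; the 'all prev are locally unknown' check also needs the tangle, so it is omitted):

function validatePrevSketch(prev) {
  if (!Array.isArray(prev)) return new Error('prev must be an array')
  if (!prev.every((p) => typeof p === 'string'))
    return new Error('prev must contain strings')
  if (prev.some((p) => p.startsWith('ppppp:')))
    return new Error('prev must not contain URIs')
  for (let i = 1; i < prev.length; i++) {
    if (prev[i - 1] > prev[i])
      return new Error('prev must be sorted in alphabetical order')
  }
  if (new Set(prev).size !== prev.length)
    return new Error('prev must be unique set')
  return null // valid
}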


@@ -1,89 +0,0 @@
const tape = require('tape')
const FeedV1 = require('../../lib/feed-v1')
const { generateKeypair } = require('../util')
tape('invalid type not a string', (t) => {
const keys = generateKeypair('alice')
t.throws(
() => {
FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 123,
})
},
/type is not a string/,
'invalid type if not a string'
)
t.end()
})
tape('invalid type with "/" character', (t) => {
const keys = generateKeypair('alice')
t.throws(
() => {
FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'group/init',
})
},
/invalid type/,
'invalid type if contains /'
)
t.end()
})
tape('invalid type with "*" character', (t) => {
const keys = generateKeypair('alice')
t.throws(
() => {
FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'star*',
})
},
/invalid type/,
'invalid type if contains *'
)
t.end()
})
tape('invalid type too short', (t) => {
const keys = generateKeypair('alice')
t.throws(
() => {
FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'xy',
})
},
/shorter than 3/,
'invalid type if too short'
)
t.end()
})
tape('invalid type too long', (t) => {
const keys = generateKeypair('alice')
t.throws(
() => {
FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'a'.repeat(120),
})
},
/100\+ characters long/,
'invalid type if too long'
)
t.end()
})
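
A minimal sketch of the type checks these five tests exercise (hypothetical; the allowed character set is an assumption, since the tests only prove that '/' and '*' are rejected):

function validateTypeSketch(type) {
  if (typeof type !== 'string') throw new Error('type is not a string')
  if (type.length < 3) throw new Error('type is shorter than 3 characters')
  if (type.length > 100) throw new Error('type is 100+ characters long')
  if (!/^[a-z0-9_-]+$/.test(type)) throw new Error('invalid type: ' + type)
}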


@@ -1,114 +0,0 @@
const tape = require('tape')
const FeedV1 = require('../../lib/feed-v1')
const { generateKeypair } = require('../util')
tape('lipmaa prevs', (t) => {
const keys = generateKeypair('alice')
const content = { text: 'Hello world!' }
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content,
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = FeedV1.getMsgHash(msg1)
tangle.add(msgHash1, msg1)
t.equals(msg1.metadata.tangles[rootHash].depth, 1, 'msg1 depth')
t.deepEquals(msg1.metadata.tangles[rootHash].prev, [rootHash], 'msg1 prev')
const msg2 = FeedV1.create({
keys,
content,
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash2 = FeedV1.getMsgHash(msg2)
tangle.add(msgHash2, msg2)
t.equals(msg2.metadata.tangles[rootHash].depth, 2, 'msg2 depth')
t.deepEquals(msg2.metadata.tangles[rootHash].prev, [msgHash1], 'msg2 prev')
const msg3 = FeedV1.create({
keys,
content,
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash3 = FeedV1.getMsgHash(msg3)
tangle.add(msgHash3, msg3)
t.equals(msg3.metadata.tangles[rootHash].depth, 3, 'msg3 depth')
t.deepEquals(
msg3.metadata.tangles[rootHash].prev,
[rootHash, msgHash2].sort(),
'msg3 prev (has lipmaa!)'
)
const msg4 = FeedV1.create({
keys,
content,
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash4 = FeedV1.getMsgHash(msg4)
tangle.add(msgHash4, msg4)
t.equals(msg4.metadata.tangles[rootHash].depth, 4, 'msg4 depth')
t.deepEquals(msg4.metadata.tangles[rootHash].prev, [msgHash3], 'msg4 prev')
const msg5 = FeedV1.create({
keys,
content,
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash5 = FeedV1.getMsgHash(msg5)
tangle.add(msgHash5, msg5)
t.equals(msg5.metadata.tangles[rootHash].depth, 5, 'msg5 depth')
t.deepEquals(msg5.metadata.tangles[rootHash].prev, [msgHash4], 'msg5 prev')
const msg6 = FeedV1.create({
keys,
content,
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash6 = FeedV1.getMsgHash(msg6)
tangle.add(msgHash6, msg6)
t.equals(msg6.metadata.tangles[rootHash].depth, 6, 'msg6 depth')
t.deepEquals(msg6.metadata.tangles[rootHash].prev, [msgHash5], 'msg6 prev')
const msg7 = FeedV1.create({
keys,
content,
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash7 = FeedV1.getMsgHash(msg7)
tangle.add(msgHash7, msg7)
t.equals(msg7.metadata.tangles[rootHash].depth, 7, 'msg7 depth')
t.deepEquals(
msg7.metadata.tangles[rootHash].prev,
[msgHash3, msgHash6].sort(),
'msg7 prev (has lipmaa!)'
)
t.end()
})
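
The leaps asserted above (depth 3 pointing back to the root, depth 7 back to depth 3, every other depth pointing only at depth - 1) are consistent with the classic Lipmaa backlink function from the Bamboo log format evaluated at depth + 1. A sketch under that assumption (the library's internals may differ):

// Classic Bamboo lipmaa(): the certificate-pool backlink target for seq n.
function lipmaa(n) {
  let m = 1
  let po3 = 3
  let x = n
  // Find the largest m of the form (3^k - 1) / 2 with m <= n
  while (m < n) {
    po3 *= 3
    m = (po3 - 1) / 2
  }
  po3 /= 3
  if (m !== n) {
    while (x !== 0) {
      m = (po3 - 1) / 2
      po3 /= 3
      x = x % m
    }
    if (m !== po3) po3 = m
  }
  return n - po3
}
function lipmaaDepth(depth) {
  return lipmaa(depth + 1) - 1
}
// lipmaaDepth(3) === 0 -> msg3 also points at the root
// lipmaaDepth(7) === 3 -> msg7 also points at msg3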


@@ -1,162 +0,0 @@
const tape = require('tape')
const FeedV1 = require('../../lib/feed-v1')
const { generateKeypair } = require('../util')
tape('simple multi-author tangle', (t) => {
const keysA = generateKeypair('alice')
const keysB = generateKeypair('bob')
const rootMsgA = FeedV1.createRoot(keysA, 'post')
const rootHashA = FeedV1.getMsgHash(rootMsgA)
const tangleA = new FeedV1.Tangle(rootHashA)
tangleA.add(rootHashA, rootMsgA)
const rootMsgB = FeedV1.createRoot(keysB, 'post')
const rootHashB = FeedV1.getMsgHash(rootMsgB)
const tangleB = new FeedV1.Tangle(rootHashB)
tangleB.add(rootHashB, rootMsgB)
const msg1 = FeedV1.create({
keys: keysA,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHashA]: tangleA,
},
})
const msgHash1 = FeedV1.getMsgHash(msg1)
t.deepEquals(
Object.keys(msg1.metadata.tangles),
[rootHashA],
'msg1 has only feed tangle'
)
const tangleX = new FeedV1.Tangle(msgHash1)
tangleX.add(msgHash1, msg1)
const msg2 = FeedV1.create({
keys: keysB,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHashB]: tangleB,
[msgHash1]: tangleX,
},
})
t.deepEquals(
Object.keys(msg2.metadata.tangles),
[rootHashB, msgHash1].sort(),
'msg2 has feed tangle and misc tangle'
)
t.equal(msg2.metadata.tangles[rootHashB].depth, 1, 'msg2 feed tangle depth')
t.deepEquals(
msg2.metadata.tangles[rootHashB].prev,
[rootHashB],
'msg2 feed tangle prev'
)
t.equal(msg2.metadata.tangles[msgHash1].depth, 1, 'msg2 has tangle depth 1')
t.deepEquals(
msg2.metadata.tangles[msgHash1].prev,
[msgHash1],
'msg2 has tangle prev'
)
t.end()
})
tape('lipmaa in multi-author tangle', (t) => {
const keysA = generateKeypair('alice')
const keysB = generateKeypair('bob')
const content = { text: 'Hello world!' }
const rootMsgA = FeedV1.createRoot(keysA, 'post')
const rootHashA = FeedV1.getMsgHash(rootMsgA)
const tangleA = new FeedV1.Tangle(rootHashA)
tangleA.add(rootHashA, rootMsgA)
const rootMsgB = FeedV1.createRoot(keysB, 'post')
const rootHashB = FeedV1.getMsgHash(rootMsgB)
const tangleB = new FeedV1.Tangle(rootHashB)
tangleB.add(rootHashB, rootMsgB)
const msg1 = FeedV1.create({
keys: keysA,
content,
type: 'post',
tangles: {
[rootHashA]: tangleA,
},
})
const msgHash1 = FeedV1.getMsgHash(msg1)
tangleA.add(msgHash1, msg1)
const tangleThread = new FeedV1.Tangle(msgHash1)
tangleThread.add(msgHash1, msg1)
t.deepEquals(
Object.keys(msg1.metadata.tangles),
[rootHashA],
'A:msg1 has only feed tangle'
)
const msg2 = FeedV1.create({
keys: keysB,
content,
type: 'post',
tangles: {
[rootHashB]: tangleB,
[msgHash1]: tangleThread,
},
})
const msgHash2 = FeedV1.getMsgHash(msg2)
tangleB.add(msgHash2, msg2)
tangleThread.add(msgHash2, msg2)
t.deepEquals(
msg2.metadata.tangles[msgHash1].prev,
[msgHash1],
'B:msg2 points to A:msg1'
)
const msg3 = FeedV1.create({
keys: keysB,
content,
type: 'post',
tangles: {
[rootHashB]: tangleB,
[msgHash1]: tangleThread,
},
})
const msgHash3 = FeedV1.getMsgHash(msg3)
tangleB.add(msgHash3, msg3)
tangleThread.add(msgHash3, msg3)
t.deepEquals(
msg3.metadata.tangles[msgHash1].prev,
[msgHash2],
'B:msg3 points to B:msg2'
)
const msg4 = FeedV1.create({
keys: keysA,
content,
type: 'post',
tangles: {
[rootHashA]: tangleA,
[msgHash1]: tangleThread,
},
})
const msgHash4 = FeedV1.getMsgHash(msg4)
tangleB.add(msgHash4, msg4)
tangleThread.add(msgHash4, msg4)
t.deepEquals(
msg4.metadata.tangles[msgHash1].prev,
[msgHash1, msgHash3].sort(),
'A:msg4 points to A:msg1,B:msg3'
)
t.end()
})
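
For reference, the metadata shape that the first subtest asserts piece by piece, shown whole (values abbreviated):

// msg2.metadata.tangles, as asserted in 'simple multi-author tangle':
// {
//   [rootHashB]: { depth: 1, prev: [rootHashB] }, // Bob's feed tangle
//   [msgHash1]:  { depth: 1, prev: [msgHash1] },  // thread tangle rooted at Alice's msg1
// }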


@@ -1,111 +0,0 @@
const tape = require('tape')
const base58 = require('bs58')
const FeedV1 = require('../../lib/feed-v1')
const { generateKeypair } = require('../util')
tape('validate root msg', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const tangle = new FeedV1.Tangle(rootHash)
const err = FeedV1.validate(rootMsg, tangle, rootHash, rootHash)
if (err) console.log(err)
t.error(err, 'valid root msg')
t.end()
})
tape('validate 2nd msg with existing root', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = FeedV1.getMsgHash(msg1)
tangle.add(msgHash1, msg1)
const err = FeedV1.validate(msg1, tangle, msgHash1, rootHash)
if (err) console.log(err)
t.error(err, 'valid 2nd msg')
t.end()
})
tape('validate 2nd forked msg', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1A = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
existing: new Map(),
})
const msgHash1A = FeedV1.getMsgHash(msg1A)
const msg1B = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1B = FeedV1.getMsgHash(msg1B)
tangle.add(msgHash1A, msg1A)
tangle.add(msgHash1B, msg1B)
const err = FeedV1.validate(msg1B, tangle, msgHash1B, rootHash)
if (err) console.log(err)
t.error(err, 'valid 2nd forked msg')
t.end()
})
tape('invalid msg with unknown previous', (t) => {
const keys = generateKeypair('alice')
const rootMsg = FeedV1.createRoot(keys, 'post')
const rootHash = FeedV1.getMsgHash(rootMsg)
const tangle = new FeedV1.Tangle(rootHash)
tangle.add(rootHash, rootMsg)
const msg1 = FeedV1.create({
keys,
content: { text: 'Hello world!' },
type: 'post',
tangles: {
[rootHash]: tangle,
},
})
const msgHash1 = FeedV1.getMsgHash(msg1)
const fakeMsgHash = base58.encode(Buffer.alloc(16).fill(42))
msg1.metadata.tangles[rootHash].prev = [fakeMsgHash]
const err = FeedV1.validate(msg1, tangle, msgHash1, rootHash)
t.ok(err, 'invalid 2nd msg throws')
t.match(
err.message,
/all prev are locally unknown/,
'invalid 2nd msg description'
)
t.end()
})
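
Note the calling convention all of these tests rely on: FeedV1.validate(msg, tangle, msgHash, rootHash) returns the Error (or nothing) instead of throwing, so the 'invalid ... throws' wording in the assertion labels is historical rather than literal.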


@@ -1,48 +1,36 @@
const test = require('tape')
const path = require('path')
const os = require('os')
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const SecretStack = require('secret-stack')
const caps = require('ssb-caps')
const p = require('util').promisify
const FeedV1 = require('../lib/feed-v1')
const { generateKeypair } = require('./util')
const Keypair = require('pzp-keypair')
const MsgV4 = require('../lib/msg-v4')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'ppppp-db-get')
const DIR = path.join(os.tmpdir(), 'pzp-db-get')
rimraf.sync(DIR)
const keys = generateKeypair('alice')
let peer
let msgHash1
let msgId1
test('setup', async (t) => {
peer = SecretStack({ appKey: caps.shs })
.use(require('../lib'))
.use(require('ssb-box'))
.call(null, { keys, path: DIR })
test('get()', async (t) => {
const peer = createPeer({
keypair: Keypair.generate('ed25519', 'alice'),
path: DIR,
})
await peer.db.loaded()
const rec1 = await p(peer.db.create)({
type: 'post',
content: { text: 'I am 1st post' },
const id = await p(peer.db.account.create)({ subdomain: 'person' })
const rec1 = await p(peer.db.feed.publish)({
account: id,
domain: 'post',
data: { text: 'I am 1st post' },
})
msgHash1 = FeedV1.getMsgHash(rec1.msg)
msgId1 = FeedV1.getMsgId(rec1.msg)
})
const msgID1 = MsgV4.getMsgID(rec1.msg)
test('get() supports ppppp URIs', async (t) => {
const msg = peer.db.get(msgId1)
t.ok(msg, 'msg exists')
t.equals(msg.content.text, 'I am 1st post')
})
const msg = await p(peer.db.get)(msgID1)
assert.ok(msg, 'msg exists')
assert.equal(msg.data.text, 'I am 1st post')
test('get() supports msg hashes', async (t) => {
const msg = peer.db.get(msgHash1)
t.ok(msg, 'msg exists')
t.equals(msg.content.text, 'I am 1st post')
})
test('teardown', (t) => {
peer.close(t.end)
await p(peer.close)(true)
})


@@ -1,214 +1,353 @@
const test = require('tape')
const path = require('path')
const os = require('os')
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const SecretStack = require('secret-stack')
const caps = require('ssb-caps')
const p = require('util').promisify
const { generateKeypair } = require('./util')
const Keypair = require('pzp-keypair')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'ppppp-db-tangle')
const DIR = path.join(os.tmpdir(), 'pzp-db-tangle')
rimraf.sync(DIR)
let peer
let rootPost, reply1Lo, reply1Hi, reply2A, reply3Lo, reply3Hi
let tangle
test('setup', async (t) => {
const keysA = generateKeypair('alice')
const keysB = generateKeypair('bob')
const keysC = generateKeypair('carol')
/**
* /-reply1Hi <-\ /--reply3Hi
* root <-< >-reply2 <-<
* \--reply1Lo <-/ \--reply3Lo
*/
test('getTangle()', async (t) => {
let peer
let rootPost, reply1Lo, reply1Hi, reply2, reply3Lo, reply3Hi
let reply1LoText, reply1HiText, reply3LoText, reply3HiText
let tangle
peer = SecretStack({ appKey: caps.shs })
.use(require('../lib'))
.use(require('ssb-box'))
.call(null, { keys: keysA, path: DIR })
// Setup
{
const keypairA = Keypair.generate('ed25519', 'alice')
const keypairB = Keypair.generate('ed25519', 'bob')
const keypairC = Keypair.generate('ed25519', 'carol')
peer = createPeer({ path: DIR, keypair: keypairA })
await peer.db.loaded()
// Slow down append so that we can create msgs in parallel
const id = await p(peer.db.account.create)({
subdomain: 'person',
_nonce: 'alice',
})
// Slow down append so that we can trigger msg creation in parallel
const originalAppend = peer.db._getLog().append
peer.db._getLog().append = function (...args) {
setTimeout(originalAppend, 20, ...args)
}
rootPost = (
await p(peer.db.create)({
keys: keysA,
type: 'comment',
content: { text: 'root' },
await p(peer.db.feed.publish)({
account: id,
keypair: keypairA,
domain: 'comment',
data: { text: 'root' },
})
).hash
).id
const [{ hash: reply1B }, { hash: reply1C }] = await Promise.all([
p(peer.db.create)({
keys: keysB,
type: 'comment',
content: { text: 'reply 1' },
const [{ id: reply1B }, { id: reply1C }] = await Promise.all([
p(peer.db.feed.publish)({
account: id,
keypair: keypairB,
domain: 'comment',
data: { text: 'reply 1B' },
tangles: [rootPost],
}),
p(peer.db.create)({
keys: keysC,
type: 'comment',
content: { text: 'reply 1' },
p(peer.db.feed.publish)({
account: id,
keypair: keypairC,
domain: 'comment',
data: { text: 'reply 1C' },
tangles: [rootPost],
}),
])
reply1Lo = reply1B.localeCompare(reply1C) < 0 ? reply1B : reply1C
reply1Hi = reply1B.localeCompare(reply1C) < 0 ? reply1C : reply1B
reply1LoText = reply1B.localeCompare(reply1C) < 0 ? 'reply 1B' : 'reply 1C'
reply1HiText = reply1B.localeCompare(reply1C) < 0 ? 'reply 1C' : 'reply 1B'
reply2A = (
await p(peer.db.create)({
keys: keysA,
type: 'comment',
content: { text: 'reply 2' },
reply2 = (
await p(peer.db.feed.publish)({
account: id,
keypair: keypairA,
domain: 'comment',
data: { text: 'reply 2' },
tangles: [rootPost],
})
).hash
).id
const [{ hash: reply3B }, { hash: reply3C }] = await Promise.all([
p(peer.db.create)({
keys: keysB,
type: 'comment',
content: { text: 'reply 3' },
const [{ id: reply3B }, { id: reply3C }] = await Promise.all([
p(peer.db.feed.publish)({
account: id,
keypair: keypairB,
domain: 'comment',
data: { text: 'reply 3B' },
tangles: [rootPost],
}),
p(peer.db.create)({
keys: keysC,
type: 'comment',
content: { text: 'reply 3' },
p(peer.db.feed.publish)({
account: id,
keypair: keypairC,
domain: 'comment',
data: { text: 'reply 3C' },
tangles: [rootPost],
}),
])
reply3Lo = reply3B.localeCompare(reply3C) < 0 ? reply3B : reply3C
reply3Hi = reply3B.localeCompare(reply3C) < 0 ? reply3C : reply3B
reply3LoText = reply3B.localeCompare(reply3C) < 0 ? 'reply 3B' : 'reply 3C'
reply3HiText = reply3B.localeCompare(reply3C) < 0 ? 'reply 3C' : 'reply 3B'
tangle = peer.db.getTangle(rootPost)
})
tangle = await p(peer.db.getTangle)(rootPost)
}
test('Tangle.has', (t) => {
t.true(tangle.has(rootPost), 'has rootPost')
t.true(tangle.has(reply1Lo), 'has reply1Lo')
t.true(tangle.has(reply1Hi), 'has reply1Hi')
t.true(tangle.has(reply2A), 'has reply2A')
t.true(tangle.has(reply3Lo), 'has reply3Lo')
t.true(tangle.has(reply3Hi), 'has reply3Hi')
t.false(tangle.has('nonsense'), 'does not have nonsense')
t.end()
})
await t.test('getTangle unknown ID returns null', async (t) => {
assert.equal(
await p(peer.db.getTangle)('Lq6xwbdvGVmSsY3oYRugpZ3DY8chX9SLhRhjJKyZHQn'),
null
)
})
test('Tangle.getDepth', (t) => {
t.equals(tangle.getDepth(rootPost), 0, 'depth of rootPost is 0')
t.equals(tangle.getDepth(reply1Lo), 1, 'depth of reply1Lo is 1')
t.equals(tangle.getDepth(reply1Hi), 1, 'depth of reply1Hi is 1')
t.equals(tangle.getDepth(reply2A), 2, 'depth of reply2A is 2')
t.equals(tangle.getDepth(reply3Lo), 3, 'depth of reply3Lo is 3')
t.equals(tangle.getDepth(reply3Hi), 3, 'depth of reply3Hi is 3')
t.end()
})
await t.test('Tangle.has', (t) => {
assert.equal(tangle.has(rootPost), true, 'has rootPost')
assert.equal(tangle.has(reply1Lo), true, 'has reply1Lo')
assert.equal(tangle.has(reply1Hi), true, 'has reply1Hi')
assert.equal(tangle.has(reply2), true, 'has reply2A')
assert.equal(tangle.has(reply3Lo), true, 'has reply3Lo')
assert.equal(tangle.has(reply3Hi), true, 'has reply3Hi')
assert.equal(tangle.has('nonsense'), false, 'does not have nonsense')
})
test('Tangle.getMaxDepth', (t) => {
t.equals(tangle.getMaxDepth(), 3, 'max depth is 3')
t.end()
})
await t.test('Tangle.getDepth', (t) => {
assert.equal(tangle.getDepth(rootPost), 0, 'depth of rootPost is 0')
assert.equal(tangle.getDepth(reply1Lo), 1, 'depth of reply1Lo is 1')
assert.equal(tangle.getDepth(reply1Hi), 1, 'depth of reply1Hi is 1')
assert.equal(tangle.getDepth(reply2), 2, 'depth of reply2A is 2')
assert.equal(tangle.getDepth(reply3Lo), 3, 'depth of reply3Lo is 3')
assert.equal(tangle.getDepth(reply3Hi), 3, 'depth of reply3Hi is 3')
})
test('Tangle.topoSort', (t) => {
await t.test('Tangle.maxDepth', (t) => {
assert.equal(tangle.maxDepth, 3, 'max depth is 3')
})
await t.test('Tangle.topoSort', (t) => {
const sorted = tangle.topoSort()
t.deepEquals(sorted, [
assert.deepEqual(sorted, [
rootPost,
reply1Lo,
reply1Hi,
reply2A,
reply2,
reply3Lo,
reply3Hi,
])
t.end()
})
})
test('Tangle.precedes', (t) => {
t.true(tangle.precedes(rootPost, reply1Lo), 'rootPost precedes reply1Lo')
t.true(tangle.precedes(rootPost, reply1Hi), 'rootPost precedes reply1Hi')
t.false(
await t.test('Tangle.precedes', (t) => {
assert.equal(
tangle.precedes(rootPost, reply1Lo),
true,
'rootPost precedes reply1Lo'
)
assert.equal(
tangle.precedes(rootPost, reply1Hi),
true,
'rootPost precedes reply1Hi'
)
assert.equal(
tangle.precedes(reply1Hi, rootPost),
false,
'reply1Hi doesnt precede rootPost'
)
t.false(
assert.equal(
tangle.precedes(reply1Lo, reply1Hi),
false,
'reply1Lo doesnt precede reply1Hi'
)
t.false(tangle.precedes(reply1Lo, reply1Lo), 'reply1Lo doesnt precede itself')
t.true(tangle.precedes(reply1Lo, reply3Hi), 'reply1Lo precedes reply3Hi')
t.true(tangle.precedes(reply1Hi, reply2A), 'reply1Hi precedes reply2A')
t.false(
assert.equal(
tangle.precedes(reply1Lo, reply1Lo),
false,
'reply1Lo doesnt precede itself'
)
assert.equal(
tangle.precedes(reply1Lo, reply3Hi),
true,
'reply1Lo precedes reply3Hi'
)
assert.equal(
tangle.precedes(reply1Hi, reply2),
true,
'reply1Hi precedes reply2A'
)
assert.equal(
tangle.precedes(reply3Lo, reply1Hi),
false,
'reply3Lo doesnt precede reply1Hi'
)
})
t.end()
})
await t.test('Tangle.tips', (t) => {
const tips = tangle.tips
test('Tangle.getTips', (t) => {
const tips = tangle.getTips()
assert.equal(tips.size, 2, 'there are 2 tips')
assert.equal(tips.has(reply3Lo), true, 'tips contains reply3Lo')
assert.equal(tips.has(reply3Hi), true, 'tips contains reply3Hi')
})
t.equals(tips.size, 2, 'there are 2 tips')
t.true(tips.has(reply3Lo), 'tips contains reply3Lo')
t.true(tips.has(reply3Hi), 'tips contains reply3Hi')
t.end()
})
await t.test('Tangle.getLipmaaSet', (t) => {
assert.equal(tangle.getLipmaaSet(0).size, 0, 'lipmaa 0 (empty)')
test('Tangle.getLipmaaSet', (t) => {
t.equals(tangle.getLipmaaSet(0).size, 0, 'lipmaa 0 (empty)')
assert.equal(tangle.getLipmaaSet(1).size, 1, 'lipmaa 1 (-1)')
assert.equal(tangle.getLipmaaSet(1).has(rootPost), true, 'lipmaa 1 (-1)')
t.equals(tangle.getLipmaaSet(1).size, 1, 'lipmaa 1 (-1)')
t.true(tangle.getLipmaaSet(1).has(rootPost), 'lipmaa 1 (-1)')
assert.equal(tangle.getLipmaaSet(2).size, 2, 'lipmaa 2 (-1)')
assert.equal(tangle.getLipmaaSet(2).has(reply1Lo), true, 'lipmaa 2 (-1)')
assert.equal(tangle.getLipmaaSet(2).has(reply1Hi), true, 'lipmaa 2 (-1)')
t.equals(tangle.getLipmaaSet(2).size, 2, 'lipmaa 2 (-1)')
t.true(tangle.getLipmaaSet(2).has(reply1Lo), 'lipmaa 2 (-1)')
t.true(tangle.getLipmaaSet(2).has(reply1Hi), 'lipmaa 2 (-1)')
assert.equal(tangle.getLipmaaSet(3).size, 1, 'lipmaa 3 (leap!)')
assert.equal(tangle.getLipmaaSet(3).has(rootPost), true, 'lipmaa 3 (leap!)')
t.equals(tangle.getLipmaaSet(3).size, 1, 'lipmaa 3 (leap!)')
t.true(tangle.getLipmaaSet(3).has(rootPost), 'lipmaa 3 (leap!)')
assert.equal(tangle.getLipmaaSet(4).size, 2, 'lipmaa 4 (-1)')
assert.equal(tangle.getLipmaaSet(4).has(reply3Lo), true, 'lipmaa 4 (-1)')
assert.equal(tangle.getLipmaaSet(4).has(reply3Hi), true, 'lipmaa 4 (-1)')
t.equals(tangle.getLipmaaSet(4).size, 2, 'lipmaa 4 (-1)')
t.true(tangle.getLipmaaSet(4).has(reply3Lo), 'lipmaa 4 (-1)')
t.true(tangle.getLipmaaSet(4).has(reply3Hi), 'lipmaa 4 (-1)')
assert.equal(tangle.getLipmaaSet(5).size, 0, 'lipmaa 5 (empty)')
})
t.equals(tangle.getLipmaaSet(5).size, 0, 'lipmaa 5 (empty)')
await t.test('Tangle.getDeletablesAndErasables basic', (t) => {
const { deletables, erasables } = tangle.getDeletablesAndErasables(reply2)
t.end()
})
assert.deepEqual([...deletables], [reply1Hi], 'deletables')
assert.deepEqual([...erasables], [reply1Lo, rootPost], 'erasables')
})
test('Tangle.getDeletablesAndErasables basic', (t) => {
const { deletables, erasables } = tangle.getDeletablesAndErasables(reply2A)
await t.test('Tangle.getDeletablesAndErasables with many inputs', (t) => {
const { deletables, erasables } = tangle.getDeletablesAndErasables(
reply3Lo,
reply2
)
t.deepEquals(deletables, [reply1Hi], 'deletables')
t.deepEquals(erasables, [reply1Lo, rootPost], 'erasables')
t.end()
})
assert.deepEqual([...deletables], [reply1Hi], 'deletables')
assert.deepEqual([...erasables], [reply1Lo, rootPost], 'erasables')
})
test('Tangle.getDeletablesAndErasables with lipmaa', (t) => {
await t.test('Tangle.getDeletablesAndErasables with many inputs (2)', (t) => {
const { deletables, erasables } = tangle.getDeletablesAndErasables(
reply3Lo,
reply3Hi
)
assert.deepEqual(
[...deletables],
[reply1Lo, reply1Hi, reply2],
'deletables'
)
assert.deepEqual([...erasables], [rootPost], 'erasables')
})
await t.test('Tangle.getDeletablesAndErasables with lipmaa', (t) => {
const { deletables, erasables } = tangle.getDeletablesAndErasables(reply3Lo)
t.deepEquals(deletables, [reply1Lo, reply1Hi, reply2A], 'deletables')
t.deepEquals(erasables, [rootPost], 'erasables')
t.end()
})
assert.deepEqual(
[...deletables],
[reply1Lo, reply1Hi, reply2],
'deletables'
)
assert.deepEqual([...erasables], [rootPost], 'erasables')
})
test('Tangle.topoSort after some have been deleted and erased', async (t) => {
await t.test('Tangle.getMinimumAmong', (t) => {
const actual1 = tangle.getMinimumAmong([reply1Lo, reply1Hi])
const expected1 = [reply1Lo, reply1Hi]
assert.deepEqual(actual1, expected1)
const actual2 = tangle.getMinimumAmong([reply1Lo, reply1Hi, reply2])
const expected2 = [reply1Lo, reply1Hi]
assert.deepEqual(actual2, expected2)
const actual3 = tangle.getMinimumAmong([reply2, reply3Lo, reply3Hi])
const expected3 = [reply2]
assert.deepEqual(actual3, expected3)
const actual4 = tangle.getMinimumAmong([reply1Hi, reply3Lo])
const expected4 = [reply1Hi]
assert.deepEqual(actual4, expected4)
})
await t.test('Tangle.slice', async (t) => {
{
const msgs = await tangle.slice()
const texts = msgs.map((msg) => msg.data?.text)
assert.deepEqual(texts, [
'root',
reply1LoText,
reply1HiText,
'reply 2',
reply3LoText,
reply3HiText,
])
}
{
const msgs = await tangle.slice([], [reply2])
const texts = msgs.map((msg) => msg.data?.text)
assert.deepEqual(texts, ['root', reply1LoText, reply1HiText, 'reply 2'])
}
{
const msgs = await tangle.slice([reply2], [])
const texts = msgs.map((msg) => msg.data?.text)
assert.deepEqual(texts, [
undefined, // root
undefined, // reply1Lo (no need to have a trail from reply1Hi)
'reply 2',
reply3LoText,
reply3HiText,
])
}
{
const msgs = await tangle.slice([reply2], [reply2])
const texts = msgs.map((msg) => msg.data?.text)
assert.deepEqual(texts, [
undefined, // root
undefined, // reply1Lo (no need to have a trail from reply1Hi)
'reply 2',
])
}
{
const msgs = await tangle.slice([reply2], [reply2, reply3Lo])
const texts = msgs.map((msg) => msg.data?.text)
assert.deepEqual(texts, [
undefined, // root
undefined, // reply1Lo (no need to have a trail from reply1Hi)
'reply 2',
reply3LoText,
])
}
})
await t.test('Tangle.topoSort after some deletes and erases', async (t) => {
const { deletables, erasables } = tangle.getDeletablesAndErasables(reply3Lo)
for (const msgHash of deletables) {
await p(peer.db.del)(msgHash)
for (const msgID of deletables) {
await p(peer.db.del)(msgID)
}
for (const msgHash of erasables) {
await p(peer.db.erase)(msgHash)
for (const msgID of erasables) {
await p(peer.db.erase)(msgID)
}
const tangle2 = peer.db.getTangle(rootPost)
const tangle2 = await p(peer.db.getTangle)(rootPost)
const sorted = tangle2.topoSort()
t.deepEquals(sorted, [rootPost, reply3Lo, reply3Hi])
})
assert.deepEqual(sorted, [rootPost, reply3Lo, reply3Hi])
})
test('teardown', async (t) => {
await p(peer.close)(true)
})
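
The slice() assertions above imply the following semantics (parameter names hypothetical, inferred from the expected outputs rather than normative):

// tangle.slice(lower, upper), an inferred reading:
//   slice()                   -> whole tangle, every msg dataful
//   slice([], [reply2])       -> root up to and including reply2, dataful
//   slice([reply2], [])       -> reply2 and everything after it, preceded by
//                                a dataless trail (data === undefined) back to
//                                the root; side tips like reply1Hi that the
//                                trail does not need are dropped entirely
//   slice([reply2], [reply2]) -> just the trail plus reply2 itself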

test/ghosts.tests.js Normal file

@@ -0,0 +1,102 @@
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const Keypair = require('pzp-keypair')
const { createPeer } = require('./util')
const MsgV4 = require('../lib/msg-v4')
const DIR = path.join(os.tmpdir(), 'pzp-db-ghosts')
rimraf.sync(DIR)
const keypair = Keypair.generate('ed25519', 'alice')
test('ghosts.add, ghosts.get, ghosts.getMinDepth', async (t) => {
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
const account = await p(peer.db.account.create)({ subdomain: 'person' })
const SPAN = 5
let msgIDs = []
for (let i = 0; i < 10; i++) {
const rec = await p(peer.db.feed.publish)({
account,
domain: 'post',
data: { text: 'hello ' + i },
})
msgIDs.push(rec.id)
}
const tangleID = (await p(peer.db.feed.findMoot)(account, 'post'))?.id
const ghosts0 = peer.db.ghosts.get(tangleID)
assert.deepEqual(ghosts0, [], 'no ghosts so far')
await p(peer.db.ghosts.add)({ msgID: msgIDs[0], tangleID, span: SPAN })
await p(peer.db.ghosts.add)({ msgID: msgIDs[1], tangleID, span: SPAN })
await p(peer.db.ghosts.add)({ msgID: msgIDs[2], tangleID, span: SPAN })
await p(peer.db.ghosts.add)({ msgID: msgIDs[3], tangleID, span: SPAN })
await p(peer.db.ghosts.add)({ msgID: msgIDs[4], tangleID, span: SPAN })
const ghostsA = peer.db.ghosts.get(tangleID)
assert.deepEqual(ghostsA, msgIDs.slice(0, 5), 'ghosts so far')
const depthA = peer.db.ghosts.getMinDepth(tangleID)
assert.equal(depthA, 1, 'min depth so far')
await p(peer.db.ghosts.add)({ msgID: msgIDs[5], tangleID, span: SPAN })
const ghostsB = peer.db.ghosts.get(tangleID)
assert.deepEqual(ghostsB, msgIDs.slice(1, 6), 'ghosts so far')
const depthB = peer.db.ghosts.getMinDepth(tangleID)
assert.equal(depthB, 2, 'min depth so far')
await p(peer.close)(true)
})
test('ghosts.add queues very-concurrent calls', async (t) => {
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
const account = await p(peer.db.account.create)({ subdomain: 'person' })
const SPAN = 5
let msgIDs = []
for (let i = 0; i < 10; i++) {
const rec = await p(peer.db.feed.publish)({
account,
domain: 'post',
data: { text: 'hello ' + i },
})
msgIDs.push(rec.id)
}
const moot = MsgV4.createMoot(account, 'post', keypair)
const tangleID = MsgV4.getMsgID(moot)
const ghosts0 = peer.db.ghosts.get(tangleID)
assert.deepEqual(ghosts0, [], 'no ghosts so far')
await Promise.all([
p(peer.db.ghosts.add)({ msgID: msgIDs[0], tangleID, span: SPAN }),
p(peer.db.ghosts.add)({ msgID: msgIDs[1], tangleID, span: SPAN }),
p(peer.db.ghosts.add)({ msgID: msgIDs[2], tangleID, span: SPAN }),
p(peer.db.ghosts.add)({ msgID: msgIDs[3], tangleID, span: SPAN }),
p(peer.db.ghosts.add)({ msgID: msgIDs[4], tangleID, span: SPAN }),
])
const ghostsA = peer.db.ghosts.get(tangleID)
assert.deepEqual(ghostsA, msgIDs.slice(0, 5), 'ghosts so far')
const depthA = peer.db.ghosts.getMinDepth(tangleID)
assert.equal(depthA, 1, 'min depth so far')
await p(peer.db.ghosts.add)({ msgID: msgIDs[5], tangleID, span: SPAN })
const ghostsB = peer.db.ghosts.get(tangleID)
assert.deepEqual(ghostsB, msgIDs.slice(1, 6), 'ghosts so far')
const depthB = peer.db.ghosts.getMinDepth(tangleID)
assert.equal(depthB, 2, 'min depth so far')
await p(peer.close)(true)
})
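
Both tests pin down the same sliding-window behaviour: at most span ghost IDs are kept per tangle, oldest evicted first, and getMinDepth reports the depth of the oldest survivor. A hypothetical in-memory sketch of that window (the actual module is async and presumably persists state):

class GhostWindowSketch {
  constructor(span) {
    this.span = span
    this.entries = [] // { msgID, depth } in insertion order
  }
  add(msgID, depth) {
    this.entries.push({ msgID, depth })
    if (this.entries.length > this.span) this.entries.shift() // evict oldest
  }
  get() {
    return this.entries.map((e) => e.msgID)
  }
  getMinDepth() {
    return Math.min(...this.entries.map((e) => e.depth))
  }
}
// Adding msgIDs[0..5] (depths 1..6) with span 5 leaves msgIDs.slice(1, 6)
// and a min depth of 2, matching ghostsB and depthB above.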

test/log/basic.test.js Normal file

@@ -0,0 +1,79 @@
const test = require('node:test')
const assert = require('node:assert')
const fs = require('node:fs')
const p = require('node:util').promisify
const Log = require('../../lib/log')
test('Log basics', async function (t) {
await t.test('Log handles basic binary records', async function (t) {
const file = '/tmp/pzp-db-log-test-basic-binary.log'
try {
fs.unlinkSync(file)
} catch (_) {}
const log = Log(file, { blockSize: 2 * 1024 })
const msg1 = Buffer.from('testing')
const msg2 = Buffer.from('testing2')
const offset1 = await p(log.append)(msg1)
assert.equal(offset1, 0)
const offset2 = await p(log.append)(msg2)
assert.equal(offset2, msg1.length + 4)
const b1 = await p(log._get)(offset1)
assert.equal(b1.toString(), msg1.toString())
const b2 = await p(log._get)(offset2)
assert.equal(b2.toString(), msg2.toString())
await p(log.close)()
})
const json1 = { text: 'testing' }
const json2 = { test: 'testing2' }
await t.test('Log handles basic json records', async function (t) {
const file = '/tmp/pzp-db-log-test-basic-json.log'
try {
fs.unlinkSync(file)
} catch (_) {}
const log = Log(file, {
blockSize: 2 * 1024,
codec: require('flumecodec/json'),
})
const offset1 = await p(log.append)(json1)
assert.equal(offset1, 0)
const offset2 = await p(log.append)(json2)
assert.equal(offset2, 22)
const rec1 = await p(log._get)(offset1)
assert.deepEqual(rec1, json1)
const rec2 = await p(log._get)(offset2)
assert.deepEqual(rec2, json2)
await p(log.close)()
})
await t.test('Log handles basic json record re-reading', async function (t) {
const file = '/tmp/pzp-db-log-test-basic-json.log'
const log = Log(file, {
blockSize: 2 * 1024,
codec: require('flumecodec/json'),
})
await p(log.onDrain)()
assert.equal(log.lastRecOffset.value, 22)
const rec1 = await p(log._get)(0)
assert.deepEqual(rec1, json1)
const rec2 = await p(log._get)(22)
assert.deepEqual(rec2, json2)
await p(log.close)()
})
})
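
The offsets asserted above imply a small fixed header per record: offset2 === msg1.length + 4 in the binary test, and 22 === '{"text":"testing"}'.length + 4 in the JSON test. A framing sketch, assuming (as the corrupted-length test further down suggests) that the header's first two bytes are the little-endian payload length:

function frameRecordSketch(payload) {
  const buf = Buffer.alloc(4 + payload.length)
  buf.writeUInt16LE(payload.length, 0) // what header bytes 2..3 hold is not shown here
  payload.copy(buf, 4)
  return buf
}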

test/log/compact.test.js Normal file

@@ -0,0 +1,171 @@
const test = require('node:test')
const assert = require('node:assert')
const p = require('node:util').promisify
const Log = require('../../lib/log')
test('Log compaction', async (t) => {
await t.test('compact a log that does not have holes', async (t) => {
const file = '/tmp/pzp-db-log-compaction-test-' + Date.now() + '.log'
const log = Log(file, { blockSize: 15 })
const stats = await p(log.stats)()
assert.equal(stats.totalBytes, 0, 'stats.totalBytes (1)')
assert.equal(stats.deletedBytes, 0, 'stats.deletedBytes (1)')
const buf1 = Buffer.from('first')
const buf2 = Buffer.from('second')
const offset1 = await p(log.append)(buf1)
const offset2 = await p(log.append)(buf2)
await p(log.onDrain)()
assert('append two records')
const stats2 = await p(log.stats)()
assert.equal(stats2.totalBytes, 25, 'stats.totalBytes (2)')
assert.equal(stats2.deletedBytes, 0, 'stats.deletedBytes (2)')
const progressArr = []
log.compactionProgress((stats) => {
progressArr.push(stats)
})
await p(log.compact)()
assert.deepEqual(
progressArr,
[
{ percent: 0, done: false },
{ percent: 1, done: true, sizeDiff: 0, holesFound: 0 },
],
'progress events'
)
const stats3 = await p(log.stats)()
assert.equal(stats3.totalBytes, 25, 'stats.totalBytes (3)')
assert.equal(stats3.deletedBytes, 0, 'stats.deletedBytes (3)')
await new Promise((resolve, reject) => {
const arr = []
log.scan(
(offset, data, size) => {
arr.push(data)
},
(err) => {
if (err) return reject(err)
assert.deepEqual(arr, [buf1, buf2], 'both records exist')
resolve()
}
)
})
await p(log.close)()
})
await t.test('delete first record, compact, stream', async (t) => {
const file = '/tmp/pzp-db-log-compaction-test-' + Date.now() + '.log'
const log = Log(file, { blockSize: 15 })
const buf1 = Buffer.from('first')
const buf2 = Buffer.from('second')
const progressArr = []
log.compactionProgress((stats) => {
progressArr.push(stats)
})
const offset1 = await p(log.append)(buf1)
const offset2 = await p(log.append)(buf2)
await p(log.onDrain)()
assert('append two records')
const stats1 = await p(log.stats)()
assert.equal(stats1.totalBytes, 25, 'stats.totalBytes before')
assert.equal(stats1.deletedBytes, 0, 'stats.deletedBytes before')
await p(log.del)(offset1)
await p(log.onOverwritesFlushed)()
assert('delete first record')
await p(log.compact)()
assert.deepEqual(
progressArr,
[
{ percent: 0, done: false },
{ percent: 1, done: true, sizeDiff: 15, holesFound: 1 },
],
'progress events'
)
const stats2 = await p(log.stats)()
assert.equal(stats2.totalBytes, 10, 'stats.totalBytes after')
assert.equal(stats2.deletedBytes, 0, 'stats.deletedBytes after')
await new Promise((resolve, reject) => {
const arr = []
log.scan(
(offset, data, size) => {
arr.push(data)
},
(err) => {
if (err) return reject(err)
assert.deepEqual(arr, [buf2], 'only second record exists')
resolve()
}
)
})
await p(log.close)()
})
await t.test('delete last record, compact, stream', async (t) => {
const file = '/tmp/pzp-db-log-compaction-test-' + Date.now() + '.log'
const log = Log(file, { blockSize: 15 })
const buf1 = Buffer.from('first')
const buf2 = Buffer.from('second')
const buf3 = Buffer.from('third')
const offset1 = await p(log.append)(buf1)
const offset2 = await p(log.append)(buf2)
const offset3 = await p(log.append)(buf3)
await p(log.onDrain)()
assert('append three records')
await p(log.del)(offset3)
await p(log.onOverwritesFlushed)()
assert('delete third record')
await new Promise((resolve, reject) => {
const arr = []
log.scan(
(offset, data, size) => {
arr.push(data)
},
(err) => {
if (err) return reject(err)
assert.deepEqual(arr, [buf1, buf2, null], 'all blocks')
resolve()
}
)
})
await p(log.compact)()
await new Promise((resolve, reject) => {
const arr = []
log.scan(
(offset, data, size) => {
arr.push(data)
},
(err) => {
if (err) return reject(err)
assert.deepEqual(arr, [buf1, buf2], 'last block truncated away')
resolve()
}
)
})
await p(log.close)()
})
})
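
A reading of the byte counts above: with blockSize 15, 'first' (5 bytes plus a 4-byte header, 9 total) fits in block 0, but 'second' (6 plus 4, 10 total) does not fit in the remaining 6 bytes, so it starts block 1 at offset 15. That gives totalBytes 25 (15 for the padded first block plus 10), and after deleting 'first' and compacting, totalBytes 10 with sizeDiff 15: the whole first block reclaimed.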


@@ -0,0 +1,175 @@
const test = require('node:test')
const assert = require('node:assert')
const fs = require('node:fs')
const p = require('node:util').promisify
const RAF = require('polyraf')
const Log = require('../../lib/log')
function encode(json) {
if (Buffer.isBuffer(json)) return json
return Buffer.from(JSON.stringify(json), 'utf8')
}
function decode(buf) {
return JSON.parse(buf.toString('utf8'))
}
test('Log handles corrupted records', async (t) => {
const file = '/tmp/pzp-db-log-corrupt-records.log'
await t.test('Simulate corruption', async (t) => {
try {
fs.unlinkSync(file)
} catch (_) {}
const log = Log(file, {
blockSize: 64 * 1024,
codec: { encode, decode },
})
const msg1 = encode({ text: 'testing' })
const msg2 = encode({ bool: true, test: 'x' })
msg2[0] = 0x00
await p(log.append)(msg1)
await p(log.append)(msg2)
await p(log.onDrain)()
})
await test('Re-read without validation', async (t) => {
const log = Log(file, { blockSize: 64 * 1024 })
await p(log.onDrain)()
const arr = []
await new Promise((resolve, reject) => {
log.scan(
(offset, rec, size) => {
arr.push(rec)
},
(err) => {
if (err) reject(err)
else resolve()
}
)
})
// Because these are just buffers we won't see the corruption
assert.equal(arr.length, 2)
await p(log.close)()
})
await test('Re-read with validation', async (t) => {
const log = Log(file, {
blockSize: 64 * 1024,
validateRecord(buf) {
try {
decode(buf)
return true
} catch {
return false
}
},
})
await p(log.onDrain)()
const arr = []
await new Promise((resolve, reject) => {
log.scan(
(offset, rec, size) => {
arr.push(rec)
},
(err) => {
if (err) reject(err)
else resolve()
}
)
})
assert.equal(arr.length, 1)
await p(log.close)()
})
})
test('Log handles corrupted length', async (t) => {
const file = '/tmp/pzp-db-log-corrupt-length.log'
await t.test('Simulate length corruption', async (t) => {
try {
fs.unlinkSync(file)
} catch (_) {}
const raf = RAF(file)
let block = Buffer.alloc(64 * 1024)
const msg1 = encode({ text: 'testing' })
const msg2 = encode({ bool: true, test: 'testing2' })
block.writeUInt16LE(msg1.length, 0)
msg1.copy(block, 4)
block.writeUInt16LE(65534, 4 + msg1.length) // corrupt!
msg2.copy(block, 4 + msg1.length + 4)
await p(raf.write.bind(raf))(0, block)
await p(raf.close.bind(raf))()
})
await t.test('Re-read without validation', async (t) => {
const log = Log(file, { blockSize: 64 * 1024 })
await p(log.onDrain)()
const arr = []
await new Promise((resolve, reject) => {
log.scan(
(offset, rec, size) => {
arr.push(rec)
},
(err) => {
if (err) reject(err)
else resolve()
}
)
})
assert.equal(arr.length, 1)
const msg = encode({ bool: true, test: 'testing2' })
await p(log.append)(msg)
await p(log.close)()
})
await t.test('Re-read with validation', async (t) => {
const log = Log(file, {
blockSize: 64 * 1024,
validateRecord: (d) => {
try {
decode(d)
return true
} catch (ex) {
return false
}
},
})
await p(log.onDrain)()
const arr = []
await new Promise((resolve, reject) => {
log.scan(
(offset, rec, size) => {
arr.push(rec)
},
(err) => {
if (err) reject(err)
else resolve()
}
)
})
assert.equal(arr.length, 2)
await p(log.close)()
})
})

test/log/delete.test.js Normal file

@@ -0,0 +1,197 @@
const test = require('node:test')
const assert = require('node:assert')
const fs = require('node:fs')
const p = require('node:util').promisify
const Log = require('../../lib/log')
const msg1 = Buffer.from(
'hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world hello world'
)
const msg2 = Buffer.from(
'hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db hello offset db'
)
const msg3 = Buffer.from(
'hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db hello offsetty db'
)
test('Log deletes', async (t) => {
await t.test('Simple delete', async (t) => {
const file = '/tmp/pzp-db-log-test-del.log'
try {
fs.unlinkSync(file)
} catch (_) {}
const log = Log(file, { blockSize: 2 * 1024 })
const offset1 = await p(log.append)(msg1)
assert.equal(offset1, 0)
const offset2 = await p(log.append)(msg2)
assert.ok(offset2 > offset1)
const offset3 = await p(log.append)(msg3)
assert.ok(offset3 > offset2)
const buf1 = await p(log._get)(offset1)
assert.equal(buf1.toString(), msg1.toString())
const buf2 = await p(log._get)(offset2)
assert.equal(buf2.toString(), msg2.toString())
const buf3 = await p(log._get)(offset3)
assert.equal(buf3.toString(), msg3.toString())
await p(log.del)(offset2)
await p(log.onOverwritesFlushed)()
await assert.rejects(p(log._get)(offset2), (err) => {
assert.ok(err)
assert.equal(err.message, 'Record has been deleted')
assert.equal(err.code, 'DELETED_RECORD')
return true
})
await p(log.close)()
})
await t.test('Deleted records are not invalid upon re-opening', async (t) => {
const file = '/tmp/pzp-db-log-test-del-invalid.log'
try {
fs.unlinkSync(file)
} catch (_) {}
const opts = {
blockSize: 2 * 1024,
codec: {
encode(msg) {
return Buffer.from(JSON.stringify(msg), 'utf8')
},
decode(buf) {
return JSON.parse(buf.toString('utf8'))
},
},
validateRecord(buf) {
try {
JSON.parse(buf.toString('utf8'))
return true
} catch {
return false
}
},
}
const log = Log(file, opts)
const offset1 = await p(log.append)({ text: 'm0' })
const offset2 = await p(log.append)({ text: 'm1' })
const offset3 = await p(log.append)({ text: 'm2' })
await p(log.del)(offset2)
await p(log.onOverwritesFlushed)()
await p(log.close)()
const log2 = Log(file, opts)
let arr = []
await new Promise((resolve) => {
log2.scan(
(offset, value, size) => {
arr.push(value)
},
(err) => {
assert.ifError(err)
assert.deepEqual(arr, [{ text: 'm0' }, null, { text: 'm2' }])
resolve()
}
)
})
await assert.rejects(p(log2._get)(offset2), (err) => {
assert.ok(err)
assert.equal(err.message, 'Record has been deleted')
assert.equal(err.code, 'DELETED_RECORD')
return true
})
await p(log2.close)()
})
await t.test('Deletes are noticed by scan()', async (t) => {
const file = '/tmp/offset-test_' + Date.now() + '.log'
const log = Log(file, { blockSize: 64 * 1024 })
const buf1 = Buffer.from('hello one')
const buf2 = Buffer.from('hello two')
const offset1 = await p(log.append)(buf1)
const offset2 = await p(log.append)(buf2)
await p(log.del)(offset1)
await p(log.onDrain)()
await p(log.onOverwritesFlushed)()
const arr = []
await new Promise((resolve) => {
log.scan(
(offset, rec, length) => {
arr.push(rec)
},
(err) => {
assert.ifError(err)
resolve()
}
)
})
assert.deepEqual(arr, [null, buf2])
await p(log.close)()
})
await t.test(
'Many deleted records',
{ timeout: 3 * 60e3, skip: !!process.env.CI },
async (t) => {
const file = '/tmp/aaol-test-delete-many' + Date.now() + '.log'
const log = Log(file, { blockSize: 64 * 1024 })
const TOTAL = 100000
const offsets = []
const logAppend = p(log.append)
if (process.env.VERBOSE) console.time('append ' + TOTAL)
for (let i = 0; i < TOTAL; i += 1) {
const offset = await logAppend(Buffer.from(`hello ${i}`))
offsets.push(offset)
}
assert('appended records')
if (process.env.VERBOSE) console.timeEnd('append ' + TOTAL)
await p(log.onDrain)()
const logDel = p(log.del)
if (process.env.VERBOSE) console.time('delete ' + TOTAL / 2)
for (let i = 0; i < TOTAL; i += 2) {
await logDel(offsets[i])
}
if (process.env.VERBOSE) console.timeEnd('delete ' + TOTAL / 2)
assert('deleted messages')
await p(log.onOverwritesFlushed)()
await new Promise((resolve) => {
let i = 0
log.scan(
(offset, rec, length) => {
if (i % 2 === 0) {
if (rec !== null)
assert.fail('record ' + i + ' should be deleted')
} else {
if (rec === null)
assert.fail('record ' + i + ' should be present')
}
i += 1
},
(err) => {
assert.ifError(err)
resolve()
}
)
})
await p(log.close)()
}
)
})
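
A pattern worth noting across these subtests: del() appears to be implemented as a queued overwrite (a tombstone written in place), so the tests await log.onOverwritesFlushed() before reading back; without it, _get() and scan() could still observe the undeleted record.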


@@ -0,0 +1,68 @@
const test = require('node:test')
const assert = require('node:assert')
const fs = require('node:fs')
const p = require('node:util').promisify
const Log = require('../../lib/log')
var file = '/tmp/ds-test_restart.log'
var msg1 = { text: 'hello world hello world' }
var msg2 = { text: 'hello world hello world 2' }
test('Log fix buggy write', async (t) => {
await t.test('Simple', async (t) => {
try {
fs.unlinkSync(file)
} catch (_) {}
const log = Log(file, {
blockSize: 16 * 1024,
codec: require('flumecodec/json'),
})
const offset1 = await p(log.append)(msg1)
assert.equal(offset1, 0)
const offset2 = await p(log.append)(msg2)
assert.equal(offset2, 38)
await p(log.onDrain)()
let arr = []
await new Promise((resolve) => {
log.scan(
(offset, msg, size) => {
arr.push(msg)
},
(err) => {
assert.ifError(err)
resolve()
}
)
})
assert.deepEqual(arr, [msg1, msg2])
await p(log.close)()
})
await t.test('Re-read', async (t) => {
const log = Log(file, {
blockSize: 16 * 1024,
codec: require('flumecodec/json'),
})
await p(log.onDrain)()
let arr = []
await new Promise((resolve) => {
log.scan(
(offset, msg, size) => {
arr.push(msg)
},
(err) => {
assert.ifError(err)
resolve()
}
)
})
assert.deepEqual(arr, [msg1, msg2])
await p(log.close)()
})
})


@@ -0,0 +1,80 @@
const test = require('node:test')
const assert = require('node:assert')
const fs = require('node:fs')
const p = require('node:util').promisify
const Log = require('../../lib/log')
const msg1 = Buffer.from('hello world hello world hello world')
const msg2 = Buffer.from('ola mundo ola mundo ola mundo')
test('Log overwrites', async (t) => {
await t.test('Simple overwrite', async (t) => {
const file = '/tmp/pzp-db-log-test-overwrite.log'
try {
fs.unlinkSync(file)
} catch (_) {}
const log = Log(file, { blockSize: 2 * 1024 })
const offset1 = await p(log.append)(msg1)
assert.equal(offset1, 0)
const offset2 = await p(log.append)(msg2)
assert.ok(offset2 > offset1)
const buf1 = await p(log._get)(offset1)
assert.equal(buf1.toString(), msg1.toString())
const buf2 = await p(log._get)(offset2)
assert.equal(buf2.toString(), msg2.toString())
await p(log.overwrite)(offset1, Buffer.from('hi world'))
await p(log.onOverwritesFlushed)()
const buf = await p(log._get)(offset1)
assert.equal(buf.toString(), 'hi world')
let arr = []
await new Promise((resolve, reject) => {
log.scan(
(offset, data, size) => {
arr.push(data.toString())
},
(err) => {
if (err) reject(err)
else resolve()
}
)
})
assert.deepEqual(arr, ['hi world', 'ola mundo ola mundo ola mundo'])
await p(log.close)()
})
await t.test('Cannot overwrite larger data', async (t) => {
const file = '/tmp/pzp-db-log-test-overwrite-larger.log'
try {
fs.unlinkSync(file)
} catch (_) {}
const log = Log(file, { blockSize: 2 * 1024 })
const offset1 = await p(log.append)(msg1)
assert.equal(offset1, 0)
const offset2 = await p(log.append)(msg2)
assert.ok(offset2 > offset1)
const buf1 = await p(log._get)(offset1)
assert.equal(buf1.toString(), msg1.toString())
const buf2 = await p(log._get)(offset2)
assert.equal(buf2.toString(), msg2.toString())
const promise = p(log.overwrite)(
offset1,
Buffer.from('hello world hello world hello world hello world')
)
await assert.rejects(promise, (err) => {
assert.ok(err)
assert.match(err.message, /should not be larger than existing data/)
return true
})
await p(log.close)()
})
})

test/msg-v4/create.test.js Normal file

@@ -0,0 +1,305 @@
const test = require('node:test')
const assert = require('node:assert')
const Keypair = require('pzp-keypair')
const MsgV4 = require('../../lib/msg-v4')
let account
test('MsgV4.createAccount()', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const accountMsg0 = MsgV4.createAccount(keypair, 'person', 'MYNONCE')
if (process.env.VERBOSE) console.log(JSON.stringify(accountMsg0, null, 2))
assert.deepEqual(
accountMsg0.data,
{
action: 'add',
key: {
purpose: 'shs-and-sig',
algorithm: 'ed25519',
bytes: keypair.public,
},
nonce: 'MYNONCE',
powers: ['add', 'del', 'external-encryption', 'internal-encryption'],
},
'data'
)
assert.equal(
accountMsg0.metadata.dataHash,
'4dDbfLtNMjzMgvvCA71tp6CiLjAa5bzzeHsbYuC4dpMT',
'hash'
)
assert.equal(accountMsg0.metadata.dataSize, 210, 'size')
assert.equal(accountMsg0.metadata.account, 'self', 'account')
assert.equal(accountMsg0.metadata.accountTips, null, 'accountTips')
assert.deepEqual(accountMsg0.metadata.tangles, {}, 'tangles')
assert.equal(accountMsg0.metadata.domain, 'person', 'domain')
assert.equal(accountMsg0.metadata.v, 4, 'v')
assert.equal(accountMsg0.sigkey, keypair.public, 'sigkey')
assert.equal(MsgV4.isFeedMsg(accountMsg0), false, 'not a feed msg')
account = MsgV4.getMsgID(accountMsg0)
assert.equal(
account,
'Lq6xwbdvGVmSsY3oYRugpZ3DY8chX9SLhRhjJKyZHQn',
'account ID'
)
})
let moot = null
let mootID = null
test('MsgV4.createMoot()', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
moot = MsgV4.createMoot(account, 'post', keypair)
if (process.env.VERBOSE) console.log(JSON.stringify(moot, null, 2))
assert.equal(moot.data, null, 'data')
assert.equal(moot.metadata.dataHash, null, 'hash')
assert.equal(moot.metadata.dataSize, 0, 'size')
assert.equal(moot.metadata.account, account, 'account')
assert.equal(moot.metadata.accountTips, null, 'accountTips')
assert.deepEqual(moot.metadata.tangles, {}, 'tangles')
assert.equal(moot.metadata.domain, 'post', 'domain')
assert.equal(moot.metadata.v, 4, 'v')
assert.equal(moot.sigkey, keypair.public, 'sigkey')
assert.equal(MsgV4.isFeedMsg(moot), false, 'not a feed msg')
mootID = MsgV4.getMsgID(moot)
assert.equal(
mootID,
'HH3P5muTjZkQC7uRKpzczGWbPNZBtk4BR4msyCNjwxpU',
'moot ID'
)
})
test('MsgV4.create()', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const data = { text: 'Hello world!' }
const tangle1 = new MsgV4.Tangle(mootID)
tangle1.add(mootID, moot)
const msg1 = MsgV4.create({
keypair,
data,
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle1,
},
})
if (process.env.VERBOSE) console.log(JSON.stringify(msg1, null, 2))
assert.deepEqual(msg1.data, data, 'data')
assert.deepEqual(
Object.keys(msg1.metadata),
[
'dataHash',
'dataSize',
'account',
'accountTips',
'tangles',
'domain',
'v',
],
'metadata shape'
)
assert.deepEqual(
msg1.metadata.dataHash,
'APwSxrZUBx5wTHcT42fJTyddEjqkEAPXVMwaczTSuHTJ',
'metadata.dataHash'
)
assert.deepEqual(msg1.metadata.dataSize, 23, 'metadata.dataSize')
assert.equal(msg1.metadata.account, account, 'metadata.account')
assert.deepEqual(msg1.metadata.accountTips, [account], 'metadata.accountTips')
assert.deepEqual(
Object.keys(msg1.metadata.tangles),
[mootID],
'metadata.tangles'
)
assert.equal(msg1.metadata.tangles[mootID].depth, 1, 'tangle depth')
assert.deepEqual(msg1.metadata.tangles[mootID].prev, [mootID], 'tangle prev')
assert.equal(msg1.metadata.domain, 'post', 'metadata.domain')
assert.deepEqual(msg1.metadata.v, 4, 'metadata.v')
assert.equal(
msg1.sigkey,
'4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW',
'sigkey'
)
assert.equal(
msg1.sig,
'58LBLLJtqqRUteQRS5djhK2xxTG4VKjwibjKirqXU4LQKijD59NnrnHag5JsL54srJdhseSYaDhQoaWacbMd82v3',
'sig'
)
assert.equal(MsgV4.isFeedMsg(msg1), true, 'is a feed msg')
const msgID1 = '4hFeNiBSrRaxW1PKxJd6QDju4B1kZGT8g2LBHwGSpz6M'
assert.equal(MsgV4.getMsgID(msg1), msgID1, 'getMsgID')
const tangle2 = new MsgV4.Tangle(mootID)
tangle2.add(mootID, moot)
tangle2.add(msgID1, msg1)
const data2 = { text: 'Ola mundo!' }
const msg2 = MsgV4.create({
keypair,
data: data2,
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle2,
},
})
if (process.env.VERBOSE) console.log(JSON.stringify(msg2, null, 2))
assert.deepEqual(msg2.data, data2, 'data')
assert.deepEqual(
Object.keys(msg2.metadata),
[
'dataHash',
'dataSize',
'account',
'accountTips',
'tangles',
'domain',
'v',
],
'metadata shape'
)
assert.deepEqual(
msg2.metadata.dataHash,
'D8AD5odaS2YizdvmqZacQ1XVNmRxgw9hXoEvSuPYpa8G',
'metadata.dataHash'
)
assert.deepEqual(msg2.metadata.dataSize, 21, 'metadata.dataSize')
assert.equal(msg2.metadata.account, account, 'metadata.account')
assert.deepEqual(msg2.metadata.accountTips, [account], 'metadata.accountTips')
assert.deepEqual(
Object.keys(msg2.metadata.tangles),
[mootID],
'metadata.tangles'
)
assert.equal(msg2.metadata.tangles[mootID].depth, 2, 'tangle depth')
assert.deepEqual(msg2.metadata.tangles[mootID].prev, [msgID1], 'tangle prev')
assert.equal(msg2.metadata.domain, 'post', 'metadata.domain')
assert.deepEqual(msg2.metadata.v, 4, 'metadata.v')
assert.equal(
msg2.sigkey,
'4mjQ5aJu378cEu6TksRG3uXAiKFiwGjYQtWAjfVjDAJW',
'sigkey'
)
assert.equal(
msg2.sig,
'5KEQBLYg5iYhd3R8rSTtH4uPwVAQvwuXhNE9wmNEFiJtNCkHkNdrZ8X85bRsdekqgewvmPtue27QcqgcT2m4gjmS',
'sig'
)
assert.deepEqual(
MsgV4.getMsgID(msg2),
'CrMez268VffqRiHvSZe6DtGVSfBhXWqfEh7D2ftPEbQ3',
'getMsgID'
)
})
test('MsgV4.create() handles DAG tips correctly', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg1 = MsgV4.create({
keypair,
data: { text: '1' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
const msgID1 = MsgV4.getMsgID(msg1)
assert.deepEqual(
msg1.metadata.tangles[mootID].prev,
[MsgV4.getMootID(account, 'post')],
'msg1.prev is root'
)
tangle.add(msgID1, msg1)
const msg2A = MsgV4.create({
keypair,
data: { text: '2A' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
assert.deepEqual(
msg2A.metadata.tangles[mootID].prev,
[msgID1],
'msg2A.prev is msg1'
)
const msg2B = MsgV4.create({
keypair,
data: { text: '2B' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
const msgID2B = MsgV4.getMsgID(msg2B)
assert.deepEqual(
msg2B.metadata.tangles[mootID].prev,
[msgID1],
'msg2B.prev is msg1'
)
tangle.add(msgID2B, msg2B)
const msg3 = MsgV4.create({
keypair,
data: { text: '3' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
const msgID3 = MsgV4.getMsgID(msg3)
assert.deepEqual(
msg3.metadata.tangles[mootID].prev,
[mootID, msgID2B].sort(),
'msg3.prev is [root(lipmaa),msg2B(previous)], sorted'
)
tangle.add(msgID3, msg3)
const msgID2A = MsgV4.getMsgID(msg2A)
tangle.add(msgID2A, msg2A)
// t.pass('msg2A comes into awareness')
const msg4 = MsgV4.create({
keypair,
data: { text: '4' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
assert.deepEqual(
msg4.metadata.tangles[mootID].prev,
[msgID3, msgID2A].sort(),
'msg4.prev is [msg3(previous),msg2A(old fork as tip)], sorted'
)
})
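
Condensed, the authoring flow that the three tests above walk through; every call below appears verbatim in the tests:

const Keypair = require('pzp-keypair')
const MsgV4 = require('../../lib/msg-v4')

const keypair = Keypair.generate('ed25519', 'alice')

// 1. Account root msg; the account ID is its msg ID.
const accountMsg0 = MsgV4.createAccount(keypair, 'person', 'MYNONCE')
const account = MsgV4.getMsgID(accountMsg0)

// 2. Dataless moot anchoring the (account, domain) feed.
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)

// 3. Feed msgs, threaded onto the moot's tangle.
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg = MsgV4.create({
  keypair,
  data: { text: 'Hello world!' },
  account,
  accountTips: [account],
  domain: 'post',
  tangles: { [mootID]: tangle },
})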


@ -0,0 +1,86 @@
const test = require('node:test')
const assert = require('node:assert')
const Keypair = require('pzp-keypair')
const MsgV4 = require('../../lib/msg-v4')
test('MsgV4 domain validation', async (t) => {
await t.test('Not a string', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
assert.throws(
() => {
MsgV4.create({
keypair,
data: { text: 'Hello world!' },
domain: 123,
})
},
/invalid domain/,
'not a string'
)
})
await t.test('"/" character', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
assert.throws(
() => {
MsgV4.create({
keypair,
data: { text: 'Hello world!' },
domain: 'group/init',
})
},
/invalid domain/,
'invalid domain if contains /'
)
})
await t.test('"*" character', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
assert.throws(
() => {
MsgV4.create({
keypair,
data: { text: 'Hello world!' },
domain: 'star*',
})
},
/invalid domain/,
'invalid domain if contains *'
)
})
await t.test('Too short', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
assert.throws(
() => {
MsgV4.create({
keypair,
data: { text: 'Hello world!' },
domain: 'xy',
})
},
/shorter than 3/,
'invalid domain if too short'
)
})
await t.test('too long', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
assert.throws(
() => {
MsgV4.create({
keypair,
data: { text: 'Hello world!' },
domain: 'a'.repeat(120),
})
},
/100\+ characters long/,
'invalid domain if too long'
)
})
})
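
The assertions above pin down the domain rules: a domain must be a string of 3 to 100 characters and must not contain "/" or "*". A plausible distillation of the check (hypothetical helper; the real check lives inside MsgV4.create, and whether other characters are also banned is not shown by these tests):

function checkDomain(domain) {
  if (typeof domain !== 'string') throw new Error(`invalid domain "${domain}"`)
  if (domain.length > 100) throw new Error(`invalid domain "${domain}": 100+ characters long`)
  if (domain.length < 3) throw new Error(`invalid domain "${domain}": shorter than 3 characters`)
  if (domain.includes('/') || domain.includes('*')) {
    throw new Error(`invalid domain "${domain}"`)
  }
}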


@ -0,0 +1,349 @@
const test = require('node:test')
const assert = require('node:assert')
const base58 = require('bs58')
const Keypair = require('pzp-keypair')
const MsgV4 = require('../../lib/msg-v4')
const keypair = Keypair.generate('ed25519', 'alice')
const account = MsgV4.getMsgID(
MsgV4.createAccount(keypair, 'person', 'MYNONCE')
)
const sigkeys = new Set([keypair.public])
test('MsgV4 tangles prev validation', async (t) => {
await t.test('Non-array is a bad prev', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg = MsgV4.create({
keypair,
data: { text: 'Hello world!' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
msg.metadata.tangles[mootID].prev = null
const msgID = MsgV4.getMsgID(msg)
const err = MsgV4.validate(msg, tangle, sigkeys, msgID, mootID)
assert.ok(err, 'invalid 2nd msg throws')
assert.match(
err,
/prev ".*" should have been an array/,
'invalid 2nd msg description'
)
})
await t.test('Number not allowed in prev', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg1 = MsgV4.create({
keypair,
data: { text: 'Hello world!' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
const msgID1 = MsgV4.getMsgID(msg1)
tangle.add(msgID1, msg1)
const msg2 = MsgV4.create({
keypair,
data: { text: 'Hello world!' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
msg2.metadata.tangles[mootID].depth = 1
msg2.metadata.tangles[mootID].prev = [1234]
const msgID2 = MsgV4.getMsgID(msg2)
const err = MsgV4.validate(msg2, tangle, sigkeys, msgID2, mootID)
assert.ok(err, 'invalid 2nd msg throws')
assert.match(
err,
/prev item ".*" should have been a string/,
'invalid 2nd msg description'
)
})
await t.test('URI not allowed in prev', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg1 = MsgV4.create({
keypair,
data: { text: 'Hello world!' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
const msgID1 = MsgV4.getMsgID(msg1)
tangle.add(msgID1, msg1)
const msg2 = MsgV4.create({
keypair,
data: { text: 'Hello world!' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
const msgID2 = MsgV4.getMsgID(msg2)
const randBuf = Buffer.alloc(16).fill(16)
const fakeMsgKey1 = `pzp:message/v4/${base58.encode(randBuf)}`
msg2.metadata.tangles[mootID].depth = 1
msg2.metadata.tangles[mootID].prev = [fakeMsgKey1]
const err = MsgV4.validate(msg2, tangle, sigkeys, msgID2, mootID)
assert.ok(err, 'invalid 2nd msg throws')
assert.match(err, /prev item ".*" is a URI/, 'invalid 2nd msg description')
})
await t.test('Locally unknown prev msgID', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg1 = MsgV4.create({
keypair,
data: { text: 'Hello world!' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
const msgID1 = MsgV4.getMsgID(msg1)
tangle.add(msgID1, msg1)
const unknownMsg = MsgV4.create({
keypair,
data: { text: 'Alien' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
const unknownMsgID = MsgV4.getMsgID(unknownMsg)
const fakeMootID = 'ABCDEabcde' + mootID.substring(10)
const tangle2 = new MsgV4.Tangle(fakeMootID)
tangle2.add(fakeMootID, moot)
tangle2.add(unknownMsgID, unknownMsg)
const msg2 = MsgV4.create({
keypair,
data: { text: 'Hello world!' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle2,
},
})
const msgID2 = MsgV4.getMsgID(msg2)
const err = MsgV4.validate(msg2, tangle, sigkeys, msgID2, mootID)
assert.ok(err, 'invalid 2nd msg throws')
assert.match(
err,
/all prev are locally unknown/,
'invalid 2nd msg description'
)
})
await t.test('Feed msg with the wrong sigkey', (t) => {
const keypairA = Keypair.generate('ed25519', 'alice')
const keypairB = Keypair.generate('ed25519', 'bob')
const accountB = MsgV4.getMsgID(
MsgV4.createAccount(keypairB, 'person', 'MYNONCE')
)
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const feedTangle = new MsgV4.Tangle(mootID)
feedTangle.add(mootID, moot)
const msg = MsgV4.create({
keypair: keypairB,
data: { text: 'Hello world!' },
account: accountB,
accountTips: [accountB],
domain: 'post',
tangles: {
[mootID]: feedTangle,
},
})
const msgID = MsgV4.getMsgID(msg)
const err = MsgV4.validate(msg, feedTangle, sigkeys, msgID, mootID)
assert.ok(err, 'invalid msg throws')
assert.match(
err,
/sigkey ".*" should have been one of ".*" from the account ".*"/,
'invalid msg'
)
})
await t.test('Feed msg with the wrong domain', (t) => {
const keypairA = Keypair.generate('ed25519', 'alice')
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const feedTangle = new MsgV4.Tangle(mootID)
feedTangle.add(mootID, moot)
const msg = MsgV4.create({
keypair: keypairA,
data: { text: 'Hello world!' },
account,
accountTips: [account],
domain: 'comment',
tangles: {
[mootID]: feedTangle,
},
})
const msgID = MsgV4.getMsgID(msg)
const err = MsgV4.validate(msg, feedTangle, sigkeys, msgID, mootID)
assert.ok(err, 'invalid msg throws')
assert.match(
err,
/domain "comment" should have been feed domain "post"/,
'invalid feed msg'
)
})
await t.test('Feed msg with non-alphabetically sorted prev', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg1 = MsgV4.create({
keypair,
data: { text: '1' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
const msgID1 = MsgV4.getMsgID(msg1)
const msg2 = MsgV4.create({
keypair,
data: { text: '2' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
const msgID2 = MsgV4.getMsgID(msg2)
tangle.add(msgID1, msg1)
tangle.add(msgID2, msg2)
const msg3 = MsgV4.create({
keypair,
data: { text: '3' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
const msgID3 = MsgV4.getMsgID(msg3)
let prevMsgIDs = msg3.metadata.tangles[mootID].prev
if (prevMsgIDs[0] < prevMsgIDs[1]) {
prevMsgIDs = [prevMsgIDs[1], prevMsgIDs[0]]
} else {
prevMsgIDs = [prevMsgIDs[0], prevMsgIDs[1]]
}
msg3.metadata.tangles[mootID].prev = prevMsgIDs
const err = MsgV4.validate(msg3, tangle, sigkeys, msgID3, mootID)
assert.ok(err, 'invalid 3rd msg throws')
assert.match(
err,
/prev ".*" should have been alphabetically sorted/,
'invalid error message'
)
})
await t.test('Feed msg with duplicate prev', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg1 = MsgV4.create({
keypair,
data: { text: '1' },
account,
accountTips: [account],
domain: 'post',
tangles: {
[mootID]: tangle,
},
})
const msgID1 = MsgV4.getMsgID(msg1)
const [prevID] = msg1.metadata.tangles[mootID].prev
msg1.metadata.tangles[mootID].prev = [prevID, prevID]
const err = MsgV4.validate(msg1, tangle, sigkeys, msgID1, mootID)
assert.ok(err, 'invalid 1st msg throws')
assert.match(err, /prev ".*" contains duplicates/, 'invalid error message')
})
})
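
Note the calling convention all of these tests rely on: MsgV4.validate() does not throw, it returns an error string (matched with assert.match above) or nothing when the msg is valid. A sketch of typical call-site handling (the wrapping into an Error is illustrative):

function tryAdd(msg, tangle, sigkeys, msgID, tangleID, cb) {
  const err = MsgV4.validate(msg, tangle, sigkeys, msgID, tangleID)
  if (err) return cb(new Error(`refusing msg: ${err}`))
  // ...persist the msg, notify listeners, etc...
  cb(null, msg)
}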

130
test/msg-v4/lipmaa.test.js Normal file

@ -0,0 +1,130 @@
const test = require('node:test')
const assert = require('node:assert')
const Keypair = require('pzp-keypair')
const MsgV4 = require('../../lib/msg-v4')
test('MsgV4 lipmaa prevs', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const account = MsgV4.getMsgID(
MsgV4.createAccount(keypair, 'person', 'MYNONCE')
)
const data = { text: 'Hello world!' }
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg1 = MsgV4.create({
account,
accountTips: [account],
domain: 'post',
data,
tangles: {
[mootID]: tangle,
},
keypair,
})
const msgID1 = MsgV4.getMsgID(msg1)
tangle.add(msgID1, msg1)
assert.equal(msg1.metadata.tangles[mootID].depth, 1, 'msg1 depth')
assert.deepEqual(msg1.metadata.tangles[mootID].prev, [mootID], 'msg1 prev')
const msg2 = MsgV4.create({
account,
accountTips: [account],
domain: 'post',
data,
tangles: {
[mootID]: tangle,
},
keypair,
})
const msgID2 = MsgV4.getMsgID(msg2)
tangle.add(msgID2, msg2)
assert.equal(msg2.metadata.tangles[mootID].depth, 2, 'msg2 depth')
assert.deepEqual(msg2.metadata.tangles[mootID].prev, [msgID1], 'msg2 prev')
const msg3 = MsgV4.create({
account,
accountTips: [account],
domain: 'post',
data,
tangles: {
[mootID]: tangle,
},
keypair,
})
const msgID3 = MsgV4.getMsgID(msg3)
tangle.add(msgID3, msg3)
assert.equal(msg3.metadata.tangles[mootID].depth, 3, 'msg3 depth')
assert.deepEqual(
msg3.metadata.tangles[mootID].prev,
[mootID, msgID2].sort(),
'msg3 prev (has lipmaa!)'
)
const msg4 = MsgV4.create({
account,
accountTips: [account],
domain: 'post',
keypair,
tangles: {
[mootID]: tangle,
},
data,
})
const msgID4 = MsgV4.getMsgID(msg4)
tangle.add(msgID4, msg4)
assert.equal(msg4.metadata.tangles[mootID].depth, 4, 'msg4 depth')
assert.deepEqual(msg4.metadata.tangles[mootID].prev, [msgID3], 'msg4 prev')
const msg5 = MsgV4.create({
account,
accountTips: [account],
domain: 'post',
data,
tangles: {
[mootID]: tangle,
},
keypair,
})
const msgID5 = MsgV4.getMsgID(msg5)
tangle.add(msgID5, msg5)
assert.equal(msg5.metadata.tangles[mootID].depth, 5, 'msg5 depth')
assert.deepEqual(msg5.metadata.tangles[mootID].prev, [msgID4], 'msg5 prev')
const msg6 = MsgV4.create({
account,
accountTips: [account],
domain: 'post',
data,
tangles: {
[mootID]: tangle,
},
keypair,
})
const msgID6 = MsgV4.getMsgID(msg6)
tangle.add(msgID6, msg6)
assert.equal(msg6.metadata.tangles[mootID].depth, 6, 'msg6 depth')
assert.deepEqual(msg6.metadata.tangles[mootID].prev, [msgID5], 'msg6 prev')
const msg7 = MsgV4.create({
account,
accountTips: [account],
domain: 'post',
data,
tangles: {
[mootID]: tangle,
},
keypair,
})
const msgID7 = MsgV4.getMsgID(msg7)
tangle.add(msgID7, msg7)
assert.equal(msg7.metadata.tangles[mootID].depth, 7, 'msg7 depth')
assert.deepEqual(
msg7.metadata.tangles[mootID].prev,
[msgID3, msgID6].sort(),
'msg7 prev (has lipmaa!)'
)
})
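
The depths where an extra backlink appears (3 links back to 0, 7 links back to 3) follow the classic lipmaa scheme for shortcut certificate chains. Below is a sketch of the standard lipmaa function together with the depth mapping these assertions imply; whether lib/msg-v4 implements it in exactly this form is an assumption:

// lipmaa(1)=0, lipmaa(2)=1, lipmaa(4)=1, lipmaa(8)=4, lipmaa(13)=4, ...
function lipmaa(n) {
  let m = 1
  let po3 = 3
  let u = n
  // find the smallest m of the form (3^k - 1) / 2 with m >= n
  while (m < n) {
    po3 *= 3
    m = (po3 - 1) / 2
  }
  po3 /= 3
  if (m !== n) {
    while (u !== 0) {
      m = (po3 - 1) / 2
      po3 /= 3
      u %= m
    }
    if (m !== po3) po3 = m
  }
  return n - po3
}

// Mapping tangle depth d to lipmaa(d + 1) - 1 reproduces the test:
//   d = 3  ->  lipmaa(4) - 1 = 0  (msg3 links back to the moot)
//   d = 7  ->  lipmaa(8) - 1 = 3  (msg7 links back to msg3)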

240
test/msg-v4/tangles.test.js Normal file

@ -0,0 +1,240 @@
const test = require('node:test')
const assert = require('node:assert')
const Keypair = require('pzp-keypair')
const MsgV4 = require('../../lib/msg-v4')
test('MsgV4.Tangle simple multi-author tangle', (t) => {
const keypairA = Keypair.generate('ed25519', 'alice')
const keypairB = Keypair.generate('ed25519', 'bob')
const accountA = MsgV4.getMsgID(
MsgV4.createAccount(keypairA, 'person', 'alice')
)
const accountB = MsgV4.getMsgID(
MsgV4.createAccount(keypairB, 'person', 'bob')
)
const mootA = MsgV4.createMoot(accountA, 'post', keypairA)
const mootAID = MsgV4.getMsgID(mootA)
const tangleA = new MsgV4.Tangle(mootAID)
tangleA.add(mootAID, mootA)
assert.equal(tangleA.id, mootAID, 'tangle.id')
assert.equal(tangleA.root, mootA, 'tangle.root')
const mootB = MsgV4.createMoot(accountB, 'post', keypairB)
const mootBID = MsgV4.getMsgID(mootB)
const tangleB = new MsgV4.Tangle(mootBID)
tangleB.add(mootBID, mootB)
const msg1 = MsgV4.create({
account: accountA,
accountTips: [accountA],
domain: 'post',
data: { text: 'Hello world!' },
tangles: {
[mootAID]: tangleA,
},
keypair: keypairA,
})
const msgID1 = MsgV4.getMsgID(msg1)
assert.deepEqual(
Object.keys(msg1.metadata.tangles),
[mootAID],
'msg1 has only feed tangle'
)
const tangleX = new MsgV4.Tangle(msgID1)
tangleX.add(msgID1, msg1)
const msg2 = MsgV4.create({
account: accountB,
accountTips: [accountB],
domain: 'post',
data: { text: 'Hello world!' },
tangles: {
[mootBID]: tangleB,
[msgID1]: tangleX,
},
keypair: keypairB,
})
assert.deepEqual(
Object.keys(msg2.metadata.tangles).sort(),
[mootBID, msgID1].sort(),
'msg2 has feed tangle and misc tangle'
)
assert.equal(
msg2.metadata.tangles[mootBID].depth,
1,
'msg2 feed tangle depth'
)
assert.deepEqual(
msg2.metadata.tangles[mootBID].prev,
[mootBID],
'msg2 feed tangle prev'
)
assert.equal(
msg2.metadata.tangles[msgID1].depth,
1,
'msg2 has tangle depth 1'
)
assert.deepEqual(
msg2.metadata.tangles[msgID1].prev,
[msgID1],
'msg2 has tangle prev'
)
})
test('MsgV4.Tangle lipmaa in multi-author tangle', (t) => {
const keypairA = Keypair.generate('ed25519', 'alice')
const keypairB = Keypair.generate('ed25519', 'bob')
const accountA = MsgV4.getMsgID(
MsgV4.createAccount(keypairA, 'person', 'alice')
)
const accountB = MsgV4.getMsgID(
MsgV4.createAccount(keypairB, 'person', 'bob')
)
const data = { text: 'Hello world!' }
const mootA = MsgV4.createMoot(accountA, 'post', keypairA)
const mootAID = MsgV4.getMsgID(mootA)
const tangleA = new MsgV4.Tangle(mootAID)
tangleA.add(mootAID, mootA)
const mootB = MsgV4.createMoot(accountB, 'post', keypairB)
const mootBID = MsgV4.getMsgID(mootB)
const tangleB = new MsgV4.Tangle(mootBID)
tangleB.add(mootBID, mootB)
const msg1 = MsgV4.create({
account: accountA,
accountTips: [accountA],
domain: 'post',
data,
tangles: {
[mootAID]: tangleA,
},
keypair: keypairA,
})
const msgID1 = MsgV4.getMsgID(msg1)
tangleA.add(msgID1, msg1)
const tangleThread = new MsgV4.Tangle(msgID1)
tangleThread.add(msgID1, msg1)
assert.deepEqual(
Object.keys(msg1.metadata.tangles),
[mootAID],
'A:msg1 has only feed tangle'
)
const msg2 = MsgV4.create({
account: accountB,
accountTips: [accountB],
domain: 'post',
data,
tangles: {
[mootBID]: tangleB,
[msgID1]: tangleThread,
},
keypair: keypairB,
})
const msgID2 = MsgV4.getMsgID(msg2)
tangleB.add(msgID2, msg2)
tangleThread.add(msgID2, msg2)
assert.deepEqual(
msg2.metadata.tangles[msgID1].prev,
[msgID1],
'B:msg2 points to A:msg1'
)
const msg3 = MsgV4.create({
account: accountB,
accountTips: [accountB],
domain: 'post',
data,
tangles: {
[mootBID]: tangleB,
[msgID1]: tangleThread,
},
keypair: keypairB,
})
const msgID3 = MsgV4.getMsgID(msg3)
tangleB.add(msgID3, msg3)
tangleThread.add(msgID3, msg3)
assert.deepEqual(
msg3.metadata.tangles[msgID1].prev,
[msgID2],
'B:msg3 points to B:msg2'
)
const msg4 = MsgV4.create({
account: accountA,
accountTips: [accountA],
domain: 'post',
data,
tangles: {
[mootAID]: tangleA,
[msgID1]: tangleThread,
},
keypair: keypairA,
})
const msgID4 = MsgV4.getMsgID(msg4)
tangleB.add(msgID4, msg4)
tangleThread.add(msgID4, msg4)
assert.deepEqual(
msg4.metadata.tangles[msgID1].prev,
[msgID1, msgID3].sort(),
'A:msg4 points to A:msg1,B:msg3'
)
})
test('MsgV4.Tangle can add msgs in random order', (t) => {
const keypairA = Keypair.generate('ed25519', 'alice')
const accountA = MsgV4.getMsgID(
MsgV4.createAccount(keypairA, 'person', 'alice')
)
const mootA = MsgV4.createMoot(accountA, 'post', keypairA)
const mootAID = MsgV4.getMsgID(mootA)
const tangleBuilder = new MsgV4.Tangle(mootAID)
tangleBuilder.add(mootAID, mootA)
const msg1 = MsgV4.create({
account: accountA,
accountTips: [accountA],
domain: 'post',
data: { text: 'Hello world!' },
tangles: {
[mootAID]: tangleBuilder,
},
keypair: keypairA,
})
const msgID1 = MsgV4.getMsgID(msg1)
tangleBuilder.add(msgID1, msg1)
const msg2 = MsgV4.create({
account: accountA,
accountTips: [accountA],
domain: 'post',
data: { text: 'Hello world!' },
tangles: {
[mootAID]: tangleBuilder,
},
keypair: keypairA,
})
const msgID2 = MsgV4.getMsgID(msg2)
tangleBuilder.add(msgID2, msg2)
const tangle = new MsgV4.Tangle(mootAID)
tangle.add(mootAID, mootA)
tangle.add(msgID2, msg2)
tangle.add(msgID1, msg1)
assert.deepEqual(tangle.topoSort(), [mootAID, msgID1, msgID2])
assert.deepEqual([...tangle.tips], [msgID2], 'tangle tips')
})
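
The last test doubles as a small spec for the reading API, assuming the tangle built out of order above: add() tolerates any insertion order, topoSort() then yields a root-first topological order, and tips is iterable (the test spreads it like a Set):

tangle.topoSort()  // => [mootAID, msgID1, msgID2]
;[...tangle.tips]  // => [msgID2]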


@ -0,0 +1,163 @@
const test = require('node:test')
const assert = require('node:assert')
const Keypair = require('pzp-keypair')
const MsgV4 = require('../../lib/msg-v4')
test('MsgV4 validation', async (t) => {
await t.test('Correct root msg', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const account = MsgV4.getMsgID(
MsgV4.createAccount(keypair, 'person', 'alice')
)
const sigkeys = new Set([keypair.public])
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const err = MsgV4.validate(moot, tangle, sigkeys, mootID, mootID)
assert.ifError(err, 'valid root msg')
})
await t.test('Correct account tangle', (t) => {
const sigkeys = new Set()
const keypair1 = Keypair.generate('ed25519', 'alice')
sigkeys.add(keypair1.public)
const accountMsg0 = MsgV4.createAccount(keypair1, 'person', 'alice')
const account = MsgV4.getMsgID(accountMsg0)
const accountMsg0ID = account
const tangle = new MsgV4.Tangle(account)
tangle.add(accountMsg0ID, accountMsg0)
let err = MsgV4.validate(
accountMsg0,
tangle,
sigkeys,
accountMsg0ID,
account
)
assert.ifError(err, 'valid account root msg')
tangle.add(account, accountMsg0)
const keypair2 = Keypair.generate('ed25519', 'bob')
const accountMsg1 = MsgV4.create({
account: 'self',
accountTips: null,
domain: 'account',
data: { add: keypair2.public },
tangles: {
[account]: tangle,
},
keypair: keypair1, // announcing keypair2 but signing with keypair1
})
const accountMsg1ID = MsgV4.getMsgID(accountMsg1)
err = MsgV4.validate(accountMsg1, tangle, sigkeys, accountMsg1ID, account)
assert.ifError(err, 'valid account msg')
})
await t.test('2nd msg correct with existing root', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const account = MsgV4.getMsgID(
MsgV4.createAccount(keypair, 'person', 'alice')
)
const sigkeys = new Set([keypair.public])
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg1 = MsgV4.create({
account,
accountTips: [account],
domain: 'post',
data: { text: 'Hello world!' },
tangles: {
[mootID]: tangle,
},
keypair,
})
const msgID1 = MsgV4.getMsgID(msg1)
tangle.add(msgID1, msg1)
const err = MsgV4.validate(msg1, tangle, sigkeys, msgID1, mootID)
assert.ifError(err, 'valid 2nd msg')
})
await t.test('2nd forked msg correct', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const account = MsgV4.getMsgID(
MsgV4.createAccount(keypair, 'person', 'alice')
)
const sigkeys = new Set([keypair.public])
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg1A = MsgV4.create({
account,
accountTips: [account],
domain: 'post',
data: { text: 'Hello world!' },
tangles: {
[mootID]: tangle,
},
keypair,
})
const msgID1A = MsgV4.getMsgID(msg1A)
const msg1B = MsgV4.create({
account,
accountTips: [account],
domain: 'post',
data: { text: 'Hello world!' },
tangles: {
[mootID]: tangle,
},
keypair,
})
const msgID1B = MsgV4.getMsgID(msg1B)
tangle.add(msgID1A, msg1A)
tangle.add(msgID1B, msg1B)
const err = MsgV4.validate(msg1B, tangle, sigkeys, msgID1B, mootID)
assert.ifError(err, 'valid 2nd forked msg')
})
await t.test('Correct erased msg', (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const account = MsgV4.getMsgID(
MsgV4.createAccount(keypair, 'person', 'alice')
)
const sigkeys = new Set([keypair.public])
const moot = MsgV4.createMoot(account, 'post', keypair)
const mootID = MsgV4.getMsgID(moot)
const tangle = new MsgV4.Tangle(mootID)
tangle.add(mootID, moot)
const msg1 = MsgV4.create({
account,
accountTips: [account],
domain: 'post',
data: { text: 'Hello world!' },
tangles: {
[mootID]: tangle,
},
keypair,
})
msg1.data = null
const msgID1 = MsgV4.getMsgID(msg1)
const err = MsgV4.validate(msg1, tangle, sigkeys, msgID1, mootID)
assert.ifError(err, 'valid erased msg')
})
})


@ -1,43 +1,44 @@
const test = require('tape')
const path = require('path')
const os = require('os')
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const SecretStack = require('secret-stack')
const caps = require('ssb-caps')
const p = require('util').promisify
const { generateKeypair } = require('./util')
const Keypair = require('pzp-keypair')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'ppppp-db-msgs-iter')
const DIR = path.join(os.tmpdir(), 'pzp-db-msgs-iter')
rimraf.sync(DIR)
test('msgs() iterator', async (t) => {
const keys = generateKeypair('alice')
const peer = SecretStack({ appKey: caps.shs })
.use(require('../lib'))
.call(null, { keys, path: DIR })
const keypair = Keypair.generate('ed25519', 'alice')
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
const account = await p(peer.db.account.create)({ subdomain: 'person' })
for (let i = 0; i < 6; i++) {
await p(peer.db.create)({
type: i % 2 === 0 ? 'post' : 'about',
content:
await p(peer.db.feed.publish)({
account,
domain: i % 2 === 0 ? 'post' : 'about',
data:
i % 2 === 0
? { text: 'hello ' + i }
: { about: peer.id, name: 'Mr. #' + i },
: { about: keypair.public, name: 'Mr. #' + i },
})
}
const posts = []
const abouts = []
for (const msg of peer.db.msgs()) {
if (!msg.content) continue
if (msg.metadata.type === 'post') posts.push(msg.content.text)
else if (msg.metadata.type === 'about') abouts.push(msg.content.name)
for await (const msg of peer.db.msgs()) {
if (!msg.data) continue
if (msg.metadata.domain === 'post') posts.push(msg.data.text)
else if (msg.metadata.domain === 'about') abouts.push(msg.data.name)
}
t.deepEqual(posts, ['hello 0', 'hello 2', 'hello 4'], 'queried posts')
t.deepEqual(abouts, ['Mr. #1', 'Mr. #3', 'Mr. #5'], 'queried abouts')
assert.deepEqual(posts, ['hello 0', 'hello 2', 'hello 4'], 'queried posts')
assert.deepEqual(abouts, ['Mr. #1', 'Mr. #3', 'Mr. #5'], 'queried abouts')
await p(peer.close)(true)
})
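
One behavioral change visible in the old/new pairs above: db.msgs() is now an async iterator, so it must be consumed with for await. Minimal consumption sketch, assuming an opened peer as in the test:

for await (const msg of peer.db.msgs()) {
  if (!msg.data) continue // erased/deleted msgs and moots carry no data
  console.log(msg.metadata.domain, msg.data)
}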


@ -1,40 +1,61 @@
const test = require('tape')
const path = require('path')
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const os = require('os')
const SecretStack = require('secret-stack')
const caps = require('ssb-caps')
const p = require('util').promisify
const { generateKeypair } = require('./util')
const Keypair = require('pzp-keypair')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'ppppp-db-on-msg-added')
const DIR = path.join(os.tmpdir(), 'pzp-db-on-msg-added')
rimraf.sync(DIR)
test('onRecordAdded', async (t) => {
const keys = generateKeypair('alice')
const peer = SecretStack({ appKey: caps.shs })
.use(require('../lib'))
.call(null, { keys, path: DIR })
const peer = createPeer({
keypair: Keypair.generate('ed25519', 'alice'),
path: DIR,
})
await peer.db.loaded()
const listened = []
var remove = peer.db.onRecordAdded((ev) => {
listened.push(ev)
const account = await p(peer.db.account.create)({
subdomain: 'person',
_nonce: 'alice',
})
const rec1 = await p(peer.db.create)({
type: 'post',
content: { text: 'I am hungry' },
let publishedRec1 = false
const listenedRecs = []
var remove = peer.db.onRecordAdded((rec) => {
listenedRecs.push(rec)
if (rec.msg.data?.text === 'I am hungry') {
assert.equal(publishedRec1, true, 'onRecordAdded triggered after publish')
}
})
t.equal(rec1.msg.content.text, 'I am hungry', 'msg1 text correct')
const rec1 = await new Promise((resolve, reject) => {
peer.db.feed.publish(
{
account,
domain: 'post',
data: { text: 'I am hungry' },
},
(err, rec) => {
publishedRec1 = true
if (err) reject(err)
else resolve(rec)
}
)
})
assert.equal(rec1.msg.data.text, 'I am hungry', 'msg1 text correct')
await p(setTimeout)(500)
t.equal(listened.length, 2)
t.deepEquals(listened[0].msg.content, null, 'root')
t.deepEquals(listened[0].msg.metadata.size, 0, 'root')
t.deepEquals(listened[1], rec1, 'actual record')
assert.equal(listenedRecs.length, 3)
assert.equal(listenedRecs[0].msg.metadata.account, 'self', 'account root')
assert.equal(listenedRecs[1].msg.data, null, 'root')
assert.equal(listenedRecs[1].msg.metadata.dataSize, 0, 'root')
assert.deepEqual(listenedRecs[2], rec1, 'actual record')
remove()
await p(peer.close)(true)


@ -0,0 +1,56 @@
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const Keypair = require('pzp-keypair')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'pzp-db-on-record-deleted-or-erased')
rimraf.sync(DIR)
test('onRecordDeletedOrErased()', async (t) => {
const peer = createPeer({
keypair: Keypair.generate('ed25519', 'alice'),
path: DIR,
})
await peer.db.loaded()
const id = await p(peer.db.account.create)({
subdomain: 'person',
_nonce: 'alice',
})
const msgIDs = []
for (let i = 0; i < 5; i++) {
const rec = await p(peer.db.feed.publish)({
account: id,
domain: 'post',
data: { text: 'm' + i },
})
msgIDs.push(rec.id)
}
const listened1 = []
const remove1 = peer.db.onRecordDeletedOrErased((msgID) => {
listened1.push(msgID)
})
assert.deepEqual(listened1, [], '(nothing)')
await p(peer.db.erase)(msgIDs[2])
assert.deepEqual(listened1, [msgIDs[2]], 'erased')
remove1()
const listened2 = []
const remove2 = peer.db.onRecordDeletedOrErased((msgID) => {
listened2.push(msgID)
})
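// Note: a newly registered listener is immediately invoked with msgIDs that were already deleted or erased, hence the non-empty expectation below.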
assert.deepEqual(listened2, [msgIDs[2]], 'erased')
await p(peer.db.del)(msgIDs[1])
assert.deepEqual(listened2, [msgIDs[2], msgIDs[1]], 'erased and deleted')
remove2()
assert.deepEqual(listened1, [msgIDs[2]], 'erased') // still the same
await p(peer.close)(true)
})


@ -1,56 +1,52 @@
const test = require('tape')
const path = require('path')
const os = require('os')
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const SecretStack = require('secret-stack')
const caps = require('ssb-caps')
const p = require('util').promisify
const { generateKeypair } = require('./util')
const Keypair = require('pzp-keypair')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'ppppp-db-re-open')
const DIR = path.join(os.tmpdir(), 'pzp-db-re-open')
rimraf.sync(DIR)
test('create some msgs, close, re-open', async (t) => {
const keys = generateKeypair('alice')
const peer = SecretStack({ appKey: caps.shs })
.use(require('../lib'))
.use(require('ssb-box'))
.call(null, { keys, path: DIR })
test('publish some msgs, close, re-open', async (t) => {
const keypair = Keypair.generate('ed25519', 'alice')
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
t.pass('opened db')
const account = await p(peer.db.account.create)({ subdomain: 'person' })
// t.pass('opened db')
const msgHashes = []
const msgIDs = []
for (let i = 0; i < 6; i++) {
const rec = await p(peer.db.create)({
type: 'post',
content: { text: 'hello ' + i },
const rec = await p(peer.db.feed.publish)({
account,
domain: 'post',
data: { text: 'hello ' + i },
})
msgHashes.push(rec.hash)
msgIDs.push(rec.id)
}
t.pass('created some msgs')
// t.pass('created some msgs')
await p(peer.db.del)(msgHashes[2])
t.pass('deleted the 3rd msg')
await p(peer.db.del)(msgIDs[2])
// t.pass('deleted the 3rd msg')
await p(peer.close)(true)
t.pass('closed')
// t.pass('closed')
const peer2 = SecretStack({ appKey: caps.shs })
.use(require('../lib'))
.use(require('ssb-box'))
.call(null, { keys, path: DIR })
t.pass('re-opened')
const peer2 = createPeer({ keypair, path: DIR })
// t.pass('re-opened')
await peer2.db.loaded()
const texts = []
for (const msg of peer2.db.msgs()) {
if (!msg.content) continue
texts.push(msg.content.text)
for await (const msg of peer2.db.msgs()) {
if (!msg.data || !(msg.metadata.account?.length > 4)) continue
texts.push(msg.data.text)
}
t.deepEquals(
assert.deepEqual(
texts,
['hello 0', 'hello 1', 'hello 3', 'hello 4', 'hello 5'],
'queried posts'


@ -1,40 +1,41 @@
const test = require('tape')
const path = require('path')
const os = require('os')
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const os = require('node:os')
const p = require('node:util').promisify
const rimraf = require('rimraf')
const SecretStack = require('secret-stack')
const caps = require('ssb-caps')
const p = require('util').promisify
const { generateKeypair } = require('./util')
const Keypair = require('pzp-keypair')
const { createPeer } = require('./util')
const DIR = path.join(os.tmpdir(), 'ppppp-db-records-iter')
const DIR = path.join(os.tmpdir(), 'pzp-db-records-iter')
rimraf.sync(DIR)
test('records() iterator', async (t) => {
const keys = generateKeypair('alice')
const peer = SecretStack({ appKey: caps.shs })
.use(require('../lib'))
.call(null, { keys, path: DIR })
const keypair = Keypair.generate('ed25519', 'alice')
const peer = createPeer({ keypair, path: DIR })
await peer.db.loaded()
const account = await p(peer.db.account.create)({ subdomain: 'person' })
for (let i = 0; i < 6; i++) {
await p(peer.db.create)({
type: i % 2 === 0 ? 'post' : 'about',
content:
await p(peer.db.feed.publish)({
account,
domain: i % 2 === 0 ? 'post' : 'about',
data:
i % 2 === 0
? { text: 'hello ' + i }
: { about: peer.id, name: 'Mr. #' + i },
: { about: keypair.public, name: 'Mr. #' + i },
})
}
let count = 0
for (const rec of peer.db.records()) {
if (!rec.msg.content) continue
t.true(rec.misc.size > rec.msg.metadata.size)
for await (const rec of peer.db.records()) {
if (!rec.msg.data) continue
if (rec.msg.metadata.account === 'self') continue
assert.ok(rec.received, 'received')
count++
}
t.equals(count, 6)
assert.equal(count, 6)
await p(peer.close)(true)
})

88
test/sigkeys.test.js Normal file

@ -0,0 +1,88 @@
const test = require('node:test')
const assert = require('node:assert')
const path = require('node:path')
const p = require('node:util').promisify
const os = require('node:os')
const rimraf = require('rimraf')
const Keypair = require('pzp-keypair')
const { createPeer } = require('./util')
const MsgV4 = require('../lib/msg-v4')
const DIR = path.join(os.tmpdir(), 'pzp-db-sigkeys')
const DIR2 = path.join(os.tmpdir(), 'pzp-db-sigkeys2')
rimraf.sync(DIR)
rimraf.sync(DIR2)
test('sigkeys', async (t) => {
await t.test(
"Can't add msg that is signed by key newer than what accountTips points to",
async () => {
const keypair1 = Keypair.generate('ed25519', 'alice')
const keypair2 = Keypair.generate('ed25519', 'alice2')
const keypairOther = Keypair.generate('ed25519', 'bob')
const peer = createPeer({ keypair: keypair1, path: DIR })
const peerOther = createPeer({ keypair: keypairOther, path: DIR2 })
await peer.db.loaded()
await peerOther.db.loaded()
const account = await p(peer.db.account.create)({
keypair: keypair1,
subdomain: 'person',
})
const accountMsg0 = await p(peer.db.get)(account)
const consent = peer.db.account.consent({ account, keypair: keypair2 })
const accountRec1 = await p(peer.db.account.add)({
account,
keypair: keypair2,
consent,
powers: ['external-encryption'],
})
const goodRec = await p(peer.db.feed.publish)({
account,
domain: 'post',
data: { text: 'potatoGood' },
keypair: keypair2,
})
const postMootId = peer.db.feed.getID(account, 'post')
const postMootMsg = await p(peer.db.get)(postMootId)
const tangle = new MsgV4.Tangle(postMootId)
tangle.add(postMootId, postMootMsg)
tangle.add(goodRec.id, goodRec.msg)
const badMsg = MsgV4.create({
account,
accountTips: [account], // intentionally excluding keypair2
domain: 'post',
keypair: keypair2, // intentionally using newer key than accountTips points to
tangles: {
[postMootId]: tangle,
},
data: { text: 'potato' },
})
await assert.rejects(
p(peer.db.add)(badMsg, postMootId),
/add\(\) failed to verify msg/,
"Shouldn't be able to add() own bad msg"
)
await p(peerOther.db.add)(accountMsg0, account)
await p(peerOther.db.add)(accountRec1.msg, account)
await p(peerOther.db.add)(postMootMsg, postMootId)
await p(peerOther.db.add)(goodRec.msg, postMootId)
await assert.rejects(
p(peerOther.db.add)(badMsg, postMootId),
/add\(\) failed to verify msg/,
"Shouldn't be able to add() someone else's bad msg"
)
await p(peer.close)()
await p(peerOther.close)()
}
)
})
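
Why both add() calls fail: the bad msg pins the account state via metadata.accountTips (the account root only), and at that state keypair2 is not yet a member, so its sigkey is rejected. Schematically (a getSigkeysInAccount helper exists in this codebase, but the signature and usage shown here are assumptions):

// Schematic only; the real signature and call site may differ.
const valid = getSigkeysInAccount(accountTangle, msg.metadata.accountTips)
if (!valid.has(msg.sigkey)) {
  // add() reports: 'add() failed to verify msg'
}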


@ -1,14 +1,12 @@
const ssbKeys = require('ssb-keys')
const SSBURI = require('ssb-uri2')
const base58 = require('bs58')
function generateKeypair(seed) {
const keys = ssbKeys.generate('ed25519', seed, 'buttwoo-v1')
const { data } = SSBURI.decompose(keys.id)
keys.id = `ppppp:feed/v1/${base58.encode(Buffer.from(data, 'base64'))}`
return keys
function createPeer(globalConfig) {
return require('secret-stack/bare')()
.use(require('secret-stack/plugins/net'))
.use(require('secret-handshake-ext/secret-stack'))
.use(require('../lib'))
.use(require('ssb-box'))
.call(null, { shse: { caps: require('pzp-caps') }, global: globalConfig })
}
module.exports = {
generateKeypair,
createPeer,
}

31
tsconfig.json Normal file

@ -0,0 +1,31 @@
{
"include": [
"declarations",
"lib/**/*.js"
],
"exclude": [
"coverage/",
"node_modules/",
"test/"
],
"compilerOptions": {
"checkJs": true,
"declaration": true,
"emitDeclarationOnly": true,
"exactOptionalPropertyTypes": true,
"forceConsistentCasingInFileNames": true,
"noImplicitReturns": true,
"lib": [
"es2022",
"dom"
],
"module": "node16",
"skipLibCheck": true,
"strict": true,
"target": "es2022",
"typeRoots": [
"node_modules/@types",
"declarations"
]
}
}
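
For context: this config type-checks the JSDoc-annotated JavaScript under lib/ ("checkJs": true) and emits only .d.ts files ("emitDeclarationOnly": true), so the published types come straight from JSDoc. An illustrative annotated function (not from lib/) that tsc would check and declare:

/**
 * Depth of the immediately preceding msg, floored at the root.
 * @param {number} depth
 * @returns {number}
 */
function prevDepth(depth) {
  return Math.max(0, depth - 1)
}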