maxLogBytes is a start() argument

Andre Staltz 2023-11-24 15:35:31 +02:00
parent bd6eae2976
commit 62c321a1b8
No known key found for this signature in database
GPG Key ID: 9EDE23EA7E8A4890
6 changed files with 25 additions and 41 deletions

View File

@@ -38,16 +38,6 @@ function assertGoalsPlugin(peer) {
   if (!peer.goals) throw new Error('gc plugin requires ppppp-goals plugin')
 }
 
-/**
- * @param {Config} config
- * @returns {asserts config is ExpectedConfig}
- */
-function assertValidConfig(config) {
-  if (typeof config.gc?.maxLogBytes !== 'number') {
-    throw new Error('gc requires config.gc.maxLogBytes')
-  }
-}
-
 /**
  * @param {{ db: PPPPPDB | null, goals: PPPPPGoal | null }} peer
  * @param {Config} config
@@ -56,12 +46,6 @@ function initGC(peer, config) {
   // Assertions
   assertDBPlugin(peer)
   assertGoalsPlugin(peer)
-  assertValidConfig(config)
-  const MAX_LOG_BYTES = config.gc.maxLogBytes
-
-  /** Number of records that match roughly 1% of the max log size */
-  const CHECKPOINT = Math.floor((MAX_LOG_BYTES * 0.01) / 500) // assuming 1 record = 500 bytes
-
   // State
   const debug = makeDebug('ppppp:gc')
@@ -133,10 +117,11 @@ function initGC(peer, config) {
 
   /**
    * @param {number} percentUsed
+   * @param {number} maxLogBytes
    * @param {{ totalBytes: number; }} stats
    */
-  function reportCleanupNeed(percentUsed, stats) {
-    const bytesRemaining = MAX_LOG_BYTES - stats.totalBytes
+  function reportCleanupNeed(percentUsed, maxLogBytes, stats) {
+    const bytesRemaining = maxLogBytes - stats.totalBytes
     const kbRemaining = bytesRemaining >> 10
     const mbRemaining = bytesRemaining >> 20
     const remaining =
@@ -169,21 +154,27 @@ function initGC(peer, config) {
   /**
    * Monitor the log size and schedule compaction and/or cleanup.
+   *
+   * @param {number} maxLogBytes
    */
-  function monitorLogSize() {
+  function monitorLogSize(maxLogBytes) {
     assertDBPlugin(peer)
 
+    /** Number of records that match roughly 1% of the max log size */
+    const CHECKPOINT = Math.floor((maxLogBytes * 0.01) / 500) // assuming 1 record = 500 bytes
+
     function checkLogSize() {
       assertDBPlugin(peer)
       peer.db.log.stats((err, stats) => {
         if (err) return
-        const percentUsed = (stats.totalBytes / MAX_LOG_BYTES) * 100
+        const percentUsed = (stats.totalBytes / maxLogBytes) * 100
         const percentDeleted = (stats.deletedBytes / stats.totalBytes) * 100
         const needsCleanup = percentUsed > 80
         const needsCompaction = percentDeleted > 30
 
         // Schedule clean up
         if ((needsCleanup || needsCompaction) && !hasCleanupScheduled) {
-          if (needsCleanup) reportCleanupNeed(percentUsed, stats)
+          if (needsCleanup) reportCleanupNeed(percentUsed, maxLogBytes, stats)
           if (needsCompaction) reportCompactionNeed(percentDeleted, stats)
           hasCleanupScheduled = true
           if (needsCleanup) {
@@ -212,9 +203,15 @@ function initGC(peer, config) {
     checkLogSize()
   }
 
-  function start() {
+  /**
+   * @param {number?} maxLogBytes
+   */
+  function start(maxLogBytes) {
+    const actualMaxLogBytes = maxLogBytes ?? config.gc?.maxLogBytes ?? null
+    // prettier-ignore
+    if (!actualMaxLogBytes) throw new Error('gc plugin requires maxLogBytes via start() argument or config.gc.maxLogBytes')
     if (!stopMonitoringLogSize) {
-      monitorLogSize()
+      monitorLogSize(actualMaxLogBytes)
     }
   }
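
For reference, a minimal sketch of how a caller uses the plugin after this change; createPeer is the helper from the test files below, and the byte budget is illustrative:

// Sketch only: no gc.maxLogBytes needed in the peer config anymore
const alice = createPeer({ name: 'alice' })
await alice.db.loaded()

// Pass the log size budget directly when starting garbage collection
alice.gc.start(100 * 1024 * 1024) // 100 MB

// Configuring it still works, because start() falls back to config.gc.maxLogBytes:
// const alice = createPeer({ name: 'alice', gc: { maxLogBytes: 100 * 1024 * 1024 } })
// alice.gc.start()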

View File

@@ -25,7 +25,6 @@ function isPresent(msg) {
 
 test('Dict ghosts', async (t) => {
   const alice = createPeer({
     name: 'alice',
-    gc: { maxLogBytes: 100 * 1024 * 1024 },
     dict: { ghostSpan: 2 },
   })

View File

@@ -10,7 +10,6 @@ function getTexts(msgs) {
 
 test('Feed decay', async (t) => {
   const alice = createPeer({
     name: 'alice',
-    gc: { maxLogBytes: 100 * 1024 * 1024 },
   })
   await alice.db.loaded()

View File

@@ -10,7 +10,6 @@ function getTexts(msgs) {
 
 test('Feed holes', async (t) => {
   const alice = createPeer({
     name: 'alice',
-    gc: { maxLogBytes: 100 * 1024 * 1024 },
   })
   await alice.db.loaded()

View File

@@ -14,7 +14,6 @@ function getTexts(msgs) {
 
 test('Orphan weave msgs', async (t) => {
   const alice = createPeer({
     name: 'alice',
-    gc: { maxLogBytes: 100 * 1024 * 1024 },
   })
   await alice.db.loaded()

View File

@@ -8,11 +8,7 @@ function getTexts(msgs) {
 }
 
 test('Cleanup is scheduled automatically', async (t) => {
-  const alice = createPeer({
-    name: 'alice',
-    gc: { maxLogBytes: 4 * 1024 }, // 4kB, approximately 8 messages
-  })
+  const alice = createPeer({ name: 'alice' })
   await alice.db.loaded()
 
   // Alice creates her own account
@@ -40,8 +36,7 @@ test('Cleanup is scheduled automatically', async (t) => {
   alice.goals.set(postFeedID, 'newest-3')
   assert('alice set a goal for newest-3 of post feed')
 
-  alice.gc.start()
+  alice.gc.start(4 * 1024) // 4kB, approximately 8 messages
   await p(setTimeout)(3000)
 
   assert.deepEqual(
@@ -54,11 +49,7 @@ test('Cleanup is scheduled automatically', async (t) => {
 })
 
 test('Compaction is scheduled automatically', async (t) => {
-  const alice = createPeer({
-    name: 'alice',
-    gc: { maxLogBytes: 6 * 1024 }, // 6kB, approximately 12 messages
-  })
+  const alice = createPeer({ name: 'alice' })
   await alice.db.loaded()
 
   // Alice creates her own account
@@ -101,7 +92,7 @@ test('Compaction is scheduled automatically', async (t) => {
   alice.goals.set(alice.db.feed.getID(aliceID, 'post3'), 'all')
   alice.goals.set(alice.db.feed.getID(aliceID, 'post4'), 'all')
 
-  alice.gc.start()
+  alice.gc.start(6 * 1024) // 6kB, approximately 12 messages
   await p(setTimeout)(3000)
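
If neither a start() argument nor config.gc.maxLogBytes is provided, the new code throws when start() is called rather than failing at plugin init; a sketch under the same assumptions as above:

// Sketch only: no byte budget given anywhere
const bob = createPeer({ name: 'bob' })
await bob.db.loaded()
try {
  bob.gc.start()
} catch (err) {
  // 'gc plugin requires maxLogBytes via start() argument or config.gc.maxLogBytes'
  console.error(err.message)
}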