log: remove some unnecessary methods

This commit is contained in:
Andre Staltz 2023-11-07 11:11:19 +02:00
parent b6d17e947f
commit 4302094926
No known key found for this signature in database
GPG Key ID: 9EDE23EA7E8A4890
4 changed files with 22 additions and 144 deletions

View File

@@ -579,7 +579,7 @@ function AsyncAppendOnlyLog(filename, opts) {
}
/**
* @param {extractCodecType<typeof codec> | Array<extractCodecType<typeof codec>>} data
* @param {extractCodecType<typeof codec>} data
* @param {CB<number>} cb
*/
function append(data, cb) {
@@ -588,69 +588,13 @@ function AsyncAppendOnlyLog(filename, opts) {
// return
// }
if (Array.isArray(data)) {
let offset = 0
for (let i = 0, length = data.length; i < length; ++i)
offset = appendSingle(data[i])
let offset
try {
offset = appendSingle(data)
} catch (err) {
return cb(/** @type {any} */ (err))
}
cb(null, offset)
} else cb(null, appendSingle(data))
}
/**
* @param {Array<any>} dataArray
* @param {CB<Array<number>>} cb
*/
function appendTransaction(dataArray, cb) {
if (!Array.isArray(dataArray)) {
return cb(appendTransactionWantsArrayErr())
}
// if (compaction) { // FIXME:
// waitingCompaction.push(() => appendTransaction(dataArray, cb))
// return
// }
let size = 0
const encodedDataArray = dataArray.map((data) => {
let encodedData = codec.encode(data)
if (typeof encodedData === 'string') encodedData = b4a.from(encodedData)
size += Record.size(encodedData)
return encodedData
})
size += EOB.SIZE
if (size > blockSize) return cb(appendLargerThanBlockErr())
assert(typeof nextOffsetInBlock === 'number', 'nextOffsetInBlock not set')
assert(typeof latestBlockIndex === 'number', 'latestBlockIndex not set')
if (nextOffsetInBlock + size > blockSize) {
// doesn't fit
const nextBlockBuf = b4a.alloc(blockSize)
latestBlockBuf = nextBlockBuf
latestBlockIndex += 1
nextOffsetInBlock = 0
debug("data doesn't fit current block, creating new")
}
assert(latestBlockBuf, 'latestBlockBuf not set')
const offsets = /** @type {Array<number>} */ ([])
for (const encodedData of encodedDataArray) {
Record.write(latestBlockBuf, nextOffsetInBlock, encodedData)
cache.set(latestBlockIndex, latestBlockBuf) // update cache
const offset = latestBlockIndex * blockSize + nextOffsetInBlock
offsets.push(offset)
blocksToBeWritten.set(latestBlockIndex, {
blockBuf: latestBlockBuf,
offset,
})
nextOffsetInBlock += Record.size(encodedData)
debug('data inserted at offset %d', offset)
}
scheduleWrite()
return cb(null, offsets)
}
const scheduleWrite = debounce(write, writeTimeout)
@@ -873,11 +817,9 @@ function AsyncAppendOnlyLog(filename, opts) {
return {
// Public API:
get: onLoad(get),
scan: onLoad(scan),
scan: onLoad(scan), // TODO
del: onLoad(del), // TODO
append: onLoad(append), // TODO
appendTransaction: onLoad(appendTransaction),
close: onLoad(close), // TODO
onDrain: onLoad(onDrain), // TODO
onDeletesFlushed: onLoad(onDeletesFlushed),
@@ -892,6 +834,9 @@ function AsyncAppendOnlyLog(filename, opts) {
overwrite,
truncate,
hasNoSpaceFor,
// Useful for tests
_get: onLoad(get),
}
}

View File

@@ -1,67 +0,0 @@
const test = require('node:test')
const assert = require('node:assert')
const fs = require('node:fs')
const p = require('node:util').promisify
const Log = require('../../lib/log')
test('Log get() handles bad offset NaN', async function (t) {
const file = '/tmp/ppppp-db-log-test-bad-offset.log'
try {
fs.unlinkSync(file)
} catch (_) {}
const log = Log(file, { blockSize: 2 * 1024 })
const msg = Buffer.from('testing')
const offset1 = await p(log.append)(msg)
assert.equal(offset1, 0)
await assert.rejects(p(log.get)(NaN), (err) => {
assert.match(err.message, /Offset NaN is not a number/, err.message)
assert.equal(err.code, 'ERR_AAOL_INVALID_OFFSET')
return true
})
await p(log.close)()
})
test('Log get() handles bad offset -1', async function (t) {
const file = '/tmp/ppppp-db-log-test-bad-offset.log'
try {
fs.unlinkSync(file)
} catch (_) {}
const log = Log(file, { blockSize: 2 * 1024 })
const msg = Buffer.from('testing')
const offset1 = await p(log.append)(msg)
assert.equal(offset1, 0)
await assert.rejects(p(log.get)(-1), (err) => {
assert.match(err.message, /Offset -1 is negative/, err.message)
assert.equal(err.code, 'ERR_AAOL_INVALID_OFFSET')
return true
})
await p(log.close)()
})
test('Log get() handles bad offset out of bounds', async function (t) {
const file = '/tmp/ppppp-db-log-test-bad-offset.log'
try {
fs.unlinkSync(file)
} catch (_) {}
const log = Log(file, { blockSize: 2 * 1024 })
const msg = Buffer.from('testing')
const offset1 = await p(log.append)(msg)
assert.equal(offset1, 0)
await assert.rejects(p(log.get)(10240), (err) => {
assert.match(err.message, /Offset 10240 is beyond log size/, err.message)
assert.equal(err.code, 'ERR_AAOL_OFFSET_OUT_OF_BOUNDS')
return true
})
await p(log.close)()
})

View File

@@ -20,10 +20,10 @@ test('Log handles basic binary records', async function (t) {
const offset2 = await p(log.append)(msg2)
assert.equal(offset2, msg1.length + 2)
const b1 = await p(log.get)(offset1)
const b1 = await p(log._get)(offset1)
assert.equal(b1.toString(), msg1.toString())
const b2 = await p(log.get)(offset2)
const b2 = await p(log._get)(offset2)
assert.equal(b2.toString(), msg2.toString())
await p(log.close)()
@@ -48,10 +48,10 @@ test('Log handles basic json records', async function (t) {
const offset2 = await p(log.append)(json2)
assert.equal(offset2, 20)
const rec1 = await p(log.get)(offset1)
const rec1 = await p(log._get)(offset1)
assert.deepEqual(rec1, json1)
const rec2 = await p(log.get)(offset2)
const rec2 = await p(log._get)(offset2)
assert.deepEqual(rec2, json2)
await p(log.close)()
@@ -67,10 +67,10 @@ test('Log handles basic json record re-reading', async function (t) {
await p(log.onDrain)()
assert.equal(log.since.value, 20)
const rec1 = await p(log.get)(0)
const rec1 = await p(log._get)(0)
assert.deepEqual(rec1, json1)
const rec2 = await p(log.get)(20)
const rec2 = await p(log._get)(20)
assert.deepEqual(rec2, json2)
await p(log.close)()

View File

@@ -28,16 +28,16 @@ test('Log performing simple delete', async (t) => {
const offset3 = await p(log.append)(msg3)
assert.ok(offset3 > offset2)
const buf1 = await p(log.get)(offset1)
const buf1 = await p(log._get)(offset1)
assert.equal(buf1.toString(), msg1.toString())
const buf2 = await p(log.get)(offset2)
const buf2 = await p(log._get)(offset2)
assert.equal(buf2.toString(), msg2.toString())
const buf3 = await p(log.get)(offset3)
const buf3 = await p(log._get)(offset3)
assert.equal(buf3.toString(), msg3.toString())
await p(log.del)(offset2)
await p(log.onDeletesFlushed)()
await assert.rejects(p(log.get)(offset2), (err) => {
await assert.rejects(p(log._get)(offset2), (err) => {
assert.ok(err)
assert.equal(err.message, 'Record has been deleted')
assert.equal(err.code, 'ERR_AAOL_DELETED_RECORD')
@@ -99,7 +99,7 @@ test('Log deleted records are not invalid upon re-opening', async (t) => {
)
})
await assert.rejects(p(log2.get)(offset2), (err) => {
await assert.rejects(p(log2._get)(offset2), (err) => {
assert.ok(err)
assert.equal(err.message, 'Record has been deleted')
assert.equal(err.code, 'ERR_AAOL_DELETED_RECORD')