Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 19 additions & 5 deletions index.js
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
const mutexify = require('mutexify')
const b4a = require('b4a')

const { BlobReadStream, BlobWriteStream } = require('./lib/streams')
const { BlockMapReadStream, BlobReadStream, BlobWriteStream } = require('./lib/streams')
const Monitor = require('./lib/monitor')
const blockMap = require('./lib/block-map')

const DEFAULT_BLOCK_SIZE = 2 ** 16

Expand Down Expand Up @@ -153,9 +154,7 @@ class Hyperblobs {
}

async get(id, opts) {
const all =
!opts || (!opts.start && opts.length === undefined && opts.end === undefined && !opts.core)
if (all) return this._getAll(id, opts)
if (isAll(id, opts)) return this._getAll(id, opts)

const res = []
try {
Expand All @@ -172,12 +171,18 @@ class Hyperblobs {
}

// Clears the blob identified by `id` from local storage.
// If the id carries a block map, the mapped data blocks are cleared first,
// one at a time, before the id's own block range (which, for a mapped blob,
// holds the encoded map chunks) is cleared.
async clear(id, opts) {
  if (id.blockMap) {
    // Best-effort: only use a map that is already available locally
    // (wait: false) — do not hit the network just to clear.
    const map = await blockMap.get(this.core, id, { wait: false })
    if (map) {
      // NOTE(review): blocks are cleared sequentially, one index per call —
      // presumably to keep clear ranges minimal; confirm if batching is safe.
      for (const b of map.blocks) await this.core.clear(b.index, b.index + 1, opts)
    }
  }
  // Clear the contiguous range the id points at.
  return this.core.clear(id.blockOffset, id.blockOffset + id.blockLength, opts)
}

createReadStream(id, opts) {
const core = opts && opts.core ? opts.core : this.core
return new BlobReadStream(core, id, opts)
return id.blockMap ? new BlockMapReadStream(core, id, opts) : new BlobReadStream(core, id, opts)
}

createWriteStream(opts) {
Expand Down Expand Up @@ -225,3 +230,12 @@ class Hyperblobs {
}

module.exports = Hyperblobs

// True when get() can read the entire blob in a single pass: the id has no
// block map and opts carries no range (start/length/end) or alternate core.
function isAll(id, opts) {
  if (id.blockMap) return false
  if (!opts) return true

  const narrowed =
    Boolean(opts.start) ||
    opts.length !== undefined ||
    opts.end !== undefined ||
    Boolean(opts.core)

  return !narrowed
}
157 changes: 157 additions & 0 deletions lib/block-map.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,157 @@
const c = require('compact-encoding')
const b4a = require('b4a')
const crypto = require('hypercore-crypto')

// compact-encoding codec for one block-map entry: the block's index within
// the core and its byte length. The field order (index, then byteLength) is
// part of the wire format and must not change.
const block = {
  preencode(state, entry) {
    c.uint.preencode(state, entry.index)
    c.uint.preencode(state, entry.byteLength)
  },
  encode(state, entry) {
    c.uint.encode(state, entry.index)
    c.uint.encode(state, entry.byteLength)
  },
  decode(state) {
    const index = c.uint.decode(state)
    const byteLength = c.uint.decode(state)
    return { index, byteLength }
  }
}

// Length-prefixed array of block entries.
const list = c.array(block)

// compact-encoding codec for a whole block map: a uint version tag (always 0
// for now) followed by the entry list. Decoding rejects any version above 0
// so a future format fails loudly instead of being silently misread.
const map = {
  preencode(state, m) {
    c.uint.preencode(state, 0)
    list.preencode(state, m.blocks)
  },
  encode(state, m) {
    c.uint.encode(state, 0)
    list.encode(state, m.blocks)
  },
  decode(state) {
    const version = c.uint.decode(state)
    if (version > 0) throw new Error('Unsupported block map version')

    const blocks = list.decode(state)
    return { version, blocks }
  }
}

exports.hash = hashId

// Hex-encoded content hash of a block, used as the lookup key in the
// optional `hashes` map built by get().
function hashId(block) {
  const digest = crypto.hash(block)
  return b4a.toString(digest, 'hex')
}

exports.get = getBlockMap

// Fallback for ids without an explicit block map: walk the blob's contiguous
// block range and synthesize { blocks, hashes? } from the actual block sizes.
//
// opts.hashes - also build a Map from content hash -> entry
// opts.wait   - when false, return null instead of waiting for missing blocks
//
// Fixes: blocks are now fetched in parallel (matching getBlockMap) instead of
// one await per iteration, and opts.wait is forwarded instead of dropped.
async function inferBlockMap(core, id, opts = {}) {
  const hashes = !!opts.hashes
  const wait = opts.wait !== false

  const map = {
    hashes: hashes ? new Map() : null,
    blocks: []
  }

  const promises = []
  for (let i = id.blockOffset; i < id.blockOffset + id.blockLength; i++) {
    promises.push(core.get(i, { wait }))
  }

  const buffers = await Promise.all(promises)

  for (let i = 0; i < buffers.length; i++) {
    const block = buffers[i]
    if (block === null) return null // unavailable locally and wait disabled

    const entry = { index: id.blockOffset + i, byteLength: block.byteLength }
    map.blocks.push(entry)
    if (hashes) map.hashes.set(hashId(block), entry)
  }

  return map
}

// Loads the block map for `id`. Without an explicit map on the id the layout
// is inferred from the blob's own blocks; with one, the id's block range
// holds the encoded map chunks, which are fetched, decoded, and optionally
// augmented with a content-hash index.
//
// opts.hashes - also build a Map from content hash -> entry
// opts.wait   - when false, return null instead of waiting for missing blocks
//
// Fix: opts.wait is now forwarded to core.get. Previously it was dropped,
// yet decodeBlockMap's null-buffer handling and the `block === null` check
// below only ever trigger when wait: false is honored — and index.js calls
// this with { wait: false } expecting non-blocking behavior.
async function getBlockMap(core, id, opts = {}) {
  if (!id.blockMap) return inferBlockMap(core, id, opts)

  // The map itself spans at most 64 chunk blocks; larger values are treated
  // as corrupt or hostile input.
  if (id.blockLength > 64) {
    throw new Error('Block map is too large')
  }

  const hashes = !!opts.hashes
  const wait = opts.wait !== false

  const map = {
    hashes: hashes ? new Map() : null,
    blocks: null
  }

  const promises = []

  for (let i = id.blockOffset; i < id.blockOffset + id.blockLength; i++) {
    promises.push(core.get(i, { wait }))
  }

  const buffers = await Promise.all(promises)
  const m = decodeBlockMap(buffers)
  if (!m) return null

  map.blocks = m.blocks

  if (hashes && !core.writable) {
    // Prefetch all data blocks in one request; intentionally not awaited —
    // the per-block core.get below does the actual waiting.
    const blocks = []
    for (let i = 0; i < map.blocks.length; i++) blocks.push(map.blocks[i].index)
    core.download({ blocks })
  }

  if (hashes) {
    for (let i = 0; i < map.blocks.length; i++) {
      const b = map.blocks[i]
      const block = await core.get(b.index, { wait })
      if (block === null) return null
      map.hashes.set(hashId(block), b)
    }
  }

  return map
}

exports.encode = encodeBlockMap

// Encodes a block map into one or more buffers of at most 8192 entries each,
// so very large maps can be stored across multiple core blocks.
function encodeBlockMap(header) {
  const CHUNK = 8192
  const result = []

  for (let offset = 0; offset < header.blocks.length; offset += CHUNK) {
    const blocks = header.blocks.slice(offset, offset + CHUNK)
    const m = { version: 0, blocks }

    const state = { start: 0, end: 0, buffer: null }
    map.preencode(state, m)
    state.buffer = b4a.allocUnsafe(state.end)
    map.encode(state, m)

    result.push(state.buffer)
  }

  return result
}

exports.decode = decodeBlockMap

// Decodes the chunk buffers produced by encodeBlockMap back into a single
// { version, blocks } object. Returns null if any chunk is missing (a null
// buffer from a wait: false get), so callers can treat the map as absent.
function decodeBlockMap(buffers) {
  let version = 0
  let blocks = null

  for (const buffer of buffers) {
    if (!buffer) return null

    const state = { start: 0, end: buffer.byteLength, buffer }
    const chunk = map.decode(state)

    version = chunk.version
    if (blocks === null) blocks = chunk.blocks
    else blocks.push(...chunk.blocks)
  }

  return { version, blocks }
}
Loading