diff --git a/.pw-testrc.js b/.pw-testrc.js index 60982ea..69e84fa 100644 --- a/.pw-testrc.js +++ b/.pw-testrc.js @@ -3,6 +3,7 @@ import { pathToFileURL } from 'node:url' // Adds a esbuild plugin so we can resolve file URLs relative to the // import.meta.url property. + export default { buildConfig: { plugins: [ @@ -12,8 +13,8 @@ export default { onLoad({ filter: /\.js|\.ts/, namespace: 'file' }, (args) => { let code = FS.readFileSync(args.path, 'utf8') code = code.replace( - /new URL\((.*), import\.meta\.url\)/g, - `new URL(\$1, ${JSON.stringify(pathToFileURL(args.path))})` + /import\.meta\.url/g, + JSON.stringify(pathToFileURL(args.path)) ) return { contents: code } }) diff --git a/src/aggregate.js b/src/aggregate.js index 81da130..7749763 100644 --- a/src/aggregate.js +++ b/src/aggregate.js @@ -9,7 +9,7 @@ import { indexAreaStart } from './inclusion.js' const NodeSize = BigInt(Node.Size) const EntrySize = Number(Index.EntrySize) -export const MAX_CAPACITY = 2n ** BigInt(Tree.MAX_LOG2_LEAFS) * NodeSize +export const MAX_CAPACITY = 2n ** BigInt(Tree.MAX_HEIGHT) * NodeSize /** * Default aggregate size (32GiB). 
@@ -148,7 +148,7 @@ class AggregateBuilder { * offset: API.uint64 * }, RangeError>} */ - estimate({ root, size }) { + estimate({ link, size }) { if (this.parts.length >= this.limit) { return { error: new RangeError( @@ -183,7 +183,7 @@ class AggregateBuilder { return { ok: { - parts: [{ node: root, location: { level, index } }], + parts: [{ node: link.multihash.digest, location: { level, index } }], offset: offset - this.offset, }, } @@ -197,7 +197,7 @@ class Aggregate { * @param {API.uint64} source.offset * @param {API.MerkleTreeNodeSource[]} source.parts * @param {number} source.limit - * @param {API.MerkleTree} source.tree + * @param {API.AggregateTree} source.tree */ constructor({ tree, parts, limit, size, offset }) { this.tree = tree @@ -205,6 +205,7 @@ class Aggregate { this.limit = limit this.size = size this.offset = offset + this.link = Piece.createLink(this.tree.root) } /** * Size of the index in bytes. @@ -212,17 +213,16 @@ class Aggregate { get indexSize() { return this.limit * EntrySize } - link() { - return Piece.createLink(this.tree.root) + /** + * Height of the perfect binary merkle tree corresponding to this aggregate. + */ + get height() { + return this.tree.height } toJSON() { return { - link: { '/': this.link().toString() }, - // Note that currently our aggregate size is always 32GiB and that is - // below the `Number.MAX_SAFE_INTEGER` so we can safely convert it to - // a number. - // ⚠️ We must revisit this to support larger aggregates in the future. - size: Number(this.size), + link: { '/': this.link.toString() }, + height: this.height, } } } diff --git a/src/aggregate/tree.js b/src/aggregate/tree.js index c6bcfee..5c55fa8 100644 --- a/src/aggregate/tree.js +++ b/src/aggregate/tree.js @@ -5,58 +5,57 @@ import * as Proof from '../proof.js' import { pow2 } from '../uint64.js' /** - * We allow up to 2 ** 60 leafs in the tree, with is greater than then - * Number.MAX_SAFE_INTEGER ((2 ** 53) - 1) which is why we need to use - * uint64s. 
+ * We limit tree height to 60, since we have a perfect binary merkle tree this + * will fit up to 2 ** 60 leaf nodes. */ -export const MAX_LOG2_LEAFS = 60 +export const MAX_HEIGHT = 60 /** - * @param {number} log2Leafs + * Creates a new tree with a given tree `height`. + * + * @param {number} height + * @returns {API.AggregateTree} */ -export const create = (log2Leafs) => { - if (log2Leafs > MAX_LOG2_LEAFS) { - throw new RangeError(`too many leafs: 2 ** ${log2Leafs}`) +export const create = (height) => { + if (height > MAX_HEIGHT) { + throw new RangeError(`too many leafs: 2 ** ${height}`) } - if (log2Leafs < 0) { + if (height < 0) { throw new RangeError(`cannot have negative log2Leafs`) } - return new AggregateTree(log2Leafs) + return new AggregateTree(height) } +/** + * @implements {API.AggregateTree} + */ class AggregateTree { /** - * The sparse array contains the data of the tree. Levels of the tree are - * counted from the leaf layer (layer 0). - * Where the leaf layer lands depends on the `log2Leafs` value. - * The root node of a the tree is stored at position [1]. - * - * @param {number} log2Leafs + * @param {number} height * @param {SparseArray} data */ - constructor(log2Leafs, data = new SparseArray()) { - this.log2Leafs = log2Leafs + constructor(height, data = new SparseArray()) { + /** + * The sparse array contains the data of the tree. Levels of the tree are + * counted from the leaf layer (layer 0). + * + * Where the leaf layer lands depends on the `height` of the tree. + */ this.data = data - } - - get maxLevel() { - return this.log2Leafs + this.height = height } get leafCount() { - return 2n ** BigInt(this.log2Leafs) - } - - get depth() { - return this.log2Leafs + 1 + // Since this is a perfect binary tree, the leaf count is 2 ** height; it + // is a bigint as it may exceed Number.MAX_SAFE_INTEGER (2 ** 53 - 1). 
+ return 2n ** BigInt(this.height) } get root() { - return this.node(this.maxLevel, 0n) + return this.node(this.height, 0n) } /** @@ -67,11 +66,11 @@ class AggregateTree { * @returns {API.ProofData} */ collectProof(level, index) { - validateLevelIndex(this.log2Leafs, level, index) + validateLevelIndex(this.height, level, index) const path = [] let currentLevel = level let currentIndex = index - while (currentLevel < this.maxLevel) { + while (currentLevel < this.height) { // idx^1 is the sibling index const node = this.node(currentLevel, currentIndex ^ 1n) currentIndex = currentIndex / 2n @@ -99,7 +98,7 @@ class AggregateTree { * @param {API.MerkleTreeNode} node */ setNode(level, index, node) { - validateLevelIndex(this.log2Leafs, level, index) + validateLevelIndex(this.height, level, index) if (level > 0) { let left = getNodeRaw(this, level - 1, 2n * index) @@ -114,11 +113,11 @@ class AggregateTree { } } - this.data.set(idxFor(this.log2Leafs, level, index), node) + this.data.set(idxFor(this.height, level, index), node) let currentIndex = index let n = level - while (n < this.maxLevel) { + while (n < this.height) { const nextIndex = currentIndex >> 1n // clear the lowest bit of index for left node const left = getNodeRaw(this, n, currentIndex & ~1n) @@ -134,7 +133,7 @@ class AggregateTree { right || ZeroComm.fromLevel(n) ) - this.data.set(idxFor(this.log2Leafs, n + 1, nextIndex), node) + this.data.set(idxFor(this.height, n + 1, nextIndex), node) currentIndex = nextIndex n++ } @@ -161,19 +160,21 @@ const BigIntSparseBlockSize = BigInt(SparseBlockSize) /** * @template T + * @implements {API.SparseArray} */ class SparseArray { /** - * @param {Map} subs + * @param {Map} shards */ - constructor(subs = new Map()) { + constructor(shards = new Map()) { /** * @private */ - this.subs = subs + this.shards = shards } clear() { - this.subs.clear() + this.shards.clear() + return this } /** * @param {API.uint64} index @@ -181,7 +182,7 @@ class SparseArray { */ at(index) { const 
subIndex = index / BigIntSparseBlockSize - const sub = this.subs.get(subIndex) + const sub = this.shards.get(subIndex) if (!sub) { return undefined } @@ -194,13 +195,15 @@ class SparseArray { */ set(index, value) { const subIndex = index / BigIntSparseBlockSize - let sub = this.subs.get(subIndex) - if (!sub) { - sub = new Array(SparseBlockSize) - this.subs.set(subIndex, sub) + let shard = this.shards.get(subIndex) + if (!shard) { + shard = new Array(SparseBlockSize) + this.shards.set(subIndex, shard) } - sub[Number(index % BigIntSparseBlockSize)] = value + shard[Number(index % BigIntSparseBlockSize)] = value + + return this } // ignore fon now it will be used by inclusion code @@ -211,19 +214,19 @@ class SparseArray { * @private */ slice(start, end) { - const startSub = start / BigIntSparseBlockSize - const endSub = (end - 1n) / BigIntSparseBlockSize - if (startSub !== endSub) { + const startShard = start / BigIntSparseBlockSize + const endShard = (end - 1n) / BigIntSparseBlockSize + if (startShard !== endShard) { throw new Error('requested slice does not align with one sparse block') } - let sub = this.subs.get(startSub) - if (!sub) { - sub = new Array(SparseBlockSize) - this.subs.set(startSub, sub) + let shard = this.shards.get(startShard) + if (!shard) { + shard = new Array(SparseBlockSize) + this.shards.set(startShard, shard) } - return sub.slice( + return shard.slice( Number(start % BigIntSparseBlockSize), Number(end % BigIntSparseBlockSize) ) @@ -252,7 +255,7 @@ export const clear = (tree) => { /** * @typedef {{ - * log2Leafs: number + * height: number * data: SparseArray * }} Model * @@ -261,9 +264,9 @@ export const clear = (tree) => { * @param {API.uint64} idx */ const getNodeRaw = (tree, level, idx) => { - validateLevelIndex(tree.log2Leafs, level, idx) + validateLevelIndex(tree.height, level, idx) - return tree.data.at(idxFor(tree.log2Leafs, level, idx)) + return tree.data.at(idxFor(tree.height, level, idx)) } /** @@ -290,13 +293,13 @@ const 
validateLevelIndex = (maxLevel, level, index) => { } /** - * @param {number} maxLevel + * @param {number} height * @param {number} level * @param {API.uint64} index * @returns {API.uint64} */ -export const idxFor = (maxLevel, level, index) => { - const depth = maxLevel - level +export const idxFor = (height, level, index) => { + const depth = height - level // Hybrid Tree stores the MT as smaller trees in chunks dictated by SparseBlockSize // For example with SparseBlockLog2Size of 8, each SparseBlock will store a single // 8 deep tree. These threes are then stored one after breath-wise. diff --git a/src/api.ts b/src/api.ts index f2cba2e..d3a951f 100644 --- a/src/api.ts +++ b/src/api.ts @@ -1,5 +1,6 @@ import type { Link, ToString } from 'multiformats/link' +export { ToString } /** * Implementers of the `Read` interface are called "readers". Readers * allow for reading bytes from an underlying source. @@ -50,12 +52,6 @@ export interface Aggregate { tree: AggregateTree } -export interface AggregateState { - capacity: number - offset: number - parts: MerkleTreeNodeSource[] -} - export interface Vector extends Iterable { append(value: T): Vector } @@ -77,14 +73,15 @@ export interface IndexData { } export interface MerkleTree { - /** - * The Depth of the tree. A single-node tree has depth of 1 - */ - depth: number /** * Amount of leafs in this Merkle tree. */ leafCount: I + + /** + * Height of the tree. + */ + height: number /** * Root node of this Merkle tree. 
*/ @@ -113,10 +110,10 @@ export interface PieceTree extends MerkleTree { leafs: MerkleTreeNode[] } -export interface AggregateTree - extends MerkleTree, - MerkleTreeBuilder { - collectProof(level: number, index: uint64): ProofData +export interface AggregateTree + extends MerkleTree, + MerkleTreeBuilder { + collectProof(level: number, index: I): ProofData } export interface PieceInfo { @@ -124,7 +121,7 @@ export interface PieceInfo { * Commitment to the data segment (Merkle node which is the root of the * subtree containing all the nodes making up the data segment) */ - root: MerkleTreeNode + link: PieceLink /** * Size is the number of padded bytes that is contained in this piece. @@ -132,24 +129,39 @@ export interface PieceInfo { size: PaddedPieceSize } -export interface Piece extends PieceInfo { - link(): PieceLink - toJSON(): { - link: { '/': string } - size: number - } +export interface PieceInfoView extends PieceInfo { + /** + * Height of the perfect binary merkle tree representing + * this piece. + */ + height: number } -export interface ContentPiece extends Piece { +/** + * Represents a piece tree and underlying merkle tree. + */ +export interface Piece extends PieceInfoView { + tree: PieceTree + + /** + * Size of the payload from which this piece was derived. + */ contentSize: number + + /** + * Size after 0 padding to next power of 2. + */ paddedSize: number - toJSON(): { - link: { '/': string } - contentSize: number - paddedSize: number - size: number - } + /** + * Returns a JSON representation of this piece. + */ + toJSON(): PieceJSON +} + +export interface PieceJSON { + link: { '/': string } + height: number } export type PieceLink = Link @@ -240,11 +252,31 @@ export interface TreeData { * thus `nodes[0].length === 1, nodes[1].length === 2len(nodes[1]) = 2`, etc... */ nodes: MerkleTreeNode[][] + /** * Leafs is the amount of raw leafs being used. I.e. 
without padding to * nearest two-power */ - leafs: number + height: number +} + +export interface AggregateTreeData { + /** + * Height of the (perfect binary) tree. + */ + height: number + + /** + * Sparse array that contains tree nodes. Levels + * of the tree are counted from the leaf layer (0). + */ + data: SparseArray +} + +export interface SparseArray { + clear(): this + at(index: uint64): T | undefined + set(index: uint64, value: T): this } export interface ProofData { diff --git a/src/fr32.js b/src/fr32.js index f1a66cb..27c48fa 100644 --- a/src/fr32.js +++ b/src/fr32.js @@ -47,12 +47,12 @@ const FR_RATIO = IN_BITS_FR / OUT_BITS_FR */ export function toZeroPaddedSize(sourceSize) { const size = Math.max(sourceSize, MIN_PIECE_SIZE) - let highestBit = Math.floor(Math.log2(size)) + const highestBit = Math.floor(Math.log2(size)) - const bound = Math.ceil(FR_RATIO * (1 << (highestBit + 1))) + const bound = Math.ceil(FR_RATIO * 2 ** (highestBit + 1)) // the size is either the closest pow2 number, or the next pow2 number if we // don't have space for padding - return size <= bound ? bound : Math.ceil(FR_RATIO * (1 << (highestBit + 2))) + return size <= bound ? bound : Math.ceil(FR_RATIO * 2 ** (highestBit + 2)) } /** diff --git a/src/piece.js b/src/piece.js index f85f64a..58943e2 100644 --- a/src/piece.js +++ b/src/piece.js @@ -6,6 +6,7 @@ import * as Link from 'multiformats/link' import * as Tree from './piece/tree.js' import * as UnpaddedSize from './piece/unpadded-size.js' import * as PaddedSize from './piece/padded-size.js' +import { log2Ceil } from './uint64.js' export { Tree } @@ -20,55 +21,90 @@ export const SHA2_256_TRUNC254_PADDED = 0x1012 export const FilCommitmentUnsealed = 0xf101 /** - * MaxLayers is the current maximum height of the rust-fil-proofs proving tree. + * Current maximum piece size is limited by the maximum number of leaves in the + * tree, which is limited by max size of the JS array, which is 128GiB. 
*/ -export const MAX_LAYERS = 31 // result of log2( 64 GiB / 32 ) +export const MAX_PIECE_SIZE = Tree.MAX_LEAF_COUNT * NodeSize /** - * Current maximum size of the rust-fil-proofs proving tree. + * The maximum amount of data that one can compute for the piece. */ -export const MAX_PIECE_SIZE = 1 << (MAX_LAYERS + 5) - -/** - * MaxPiecePayload is the maximum amount of data that one can compute commP for. - * Constrained by the value of {@link MAX_LAYERS}. - */ -export const MAX_PIECE_PAYLOAD = (MAX_PIECE_SIZE / 128) * 127 +export const MAX_PAYLOAD_SIZE = (MAX_PIECE_SIZE / 128) * 127 export { UnpaddedSize, PaddedSize } -class Piece { +/** + * @param {API.PieceInfo} piece + */ +class PieceInfo { /** * @param {object} data - * @param {number} data.size - * @param {API.MerkleTree} data.tree + * @param {API.PieceLink} data.link + * @param {number} data.height */ - constructor({ size, tree }) { - this.contentSize = size - this.tree = tree + constructor({ link, height }) { + this.link = link + this.height = height } - get root() { - return this.tree.root + get size() { + return 2n ** BigInt(this.height) * BigInt(NodeSize) } - get paddedSize() { - return Fr32.toZeroPaddedSize(this.contentSize) + toJSON() { + return toJSON(this) } - get size() { - return BigInt(this.tree.leafCount) * BigInt(NodeSize) + toString() { + return toString(this) } - link() { - return createLink(this.tree.root) +} + +/** + * @implements {API.Piece} + */ +class Piece extends PieceInfo { + /** + * @param {object} data + * @param {number} data.contentSize + * @param {API.PieceTree} data.tree + */ + constructor({ contentSize, tree }) { + super({ link: createLink(tree.root), height: tree.height }) + this.contentSize = contentSize + this.tree = tree } - toJSON() { - return { - link: { '/': this.link().toString() }, - contentSize: this.contentSize, - paddedSize: this.paddedSize, - size: Number(this.size), - } + + get paddedSize() { + return Fr32.toZeroPaddedSize(this.contentSize) } } +/** + * @param 
{API.PieceInfo} piece + * @returns {API.PieceJSON} + */ +export const toJSON = (piece) => ({ + link: { '/': piece.link.toString() }, + height: PaddedSize.toHeight(piece.size), +}) + +/** + * + * @param {API.PieceJSON} json + * @returns {API.PieceInfoView} + */ +export const fromJSON = ({ link, height }) => + new PieceInfo({ link: Link.parse(link['/']), height }) + +/** + * @param {API.PieceInfo} piece + * @returns {API.ToString} + */ +export const toString = (piece) => JSON.stringify(toJSON(piece), null, 2) + +/** + * @param {API.ToString|string} source + */ +export const fromString = (source) => fromJSON(JSON.parse(source)) + /** * Creates Piece CID from the the merkle tree root. It will not perform * any validation. @@ -84,16 +120,22 @@ export const createLink = (root) => /** * @param {Uint8Array} source - * @returns {API.ContentPiece} + * @returns {API.Piece} */ export const build = (source) => { - if (source.byteLength < Fr32.MIN_PIECE_SIZE) { + if (source.length < Fr32.MIN_PIECE_SIZE) { + throw new RangeError( + `Piece is not defined for payloads smaller than ${Fr32.MIN_PIECE_SIZE} bytes` + ) + } + + if (source.length > MAX_PAYLOAD_SIZE) { throw new RangeError( - `commP is not defined for inputs shorter than ${Fr32.MIN_PIECE_SIZE} bytes` + `Payload exceeds maximum supported size of ${MAX_PAYLOAD_SIZE} bytes` ) } const tree = Tree.build(Fr32.pad(source)) - return new Piece({ tree, size: source.byteLength }) + return new Piece({ tree, contentSize: source.byteLength }) } diff --git a/src/piece/padded-size.js b/src/piece/padded-size.js index 035ab01..5a1d5ee 100644 --- a/src/piece/padded-size.js +++ b/src/piece/padded-size.js @@ -1,5 +1,8 @@ import * as API from '../api.js' -import { onesCount64 } from '../uint64.js' +import { onesCount64, log2Ceil } from '../uint64.js' +import * as Node from '../node.js' + +const NODE_SIZE = BigInt(Node.Size) /** * Validates that given `size` is a valid {@link API.PaddedPieceSize} and @@ -48,3 +51,10 @@ export const validate = 
(size) => { * @returns {API.UnpaddedPieceSize} */ export const toUnpaddedSize = (size) => size - size / 128n + +/** + * Calculates the height of the piece tree from unpadded size. + * + * @param {API.PaddedPieceSize} size + */ +export const toHeight = (size) => log2Ceil(size / NODE_SIZE) diff --git a/src/piece/tree.js b/src/piece/tree.js index 9caa5e9..f2e4a35 100644 --- a/src/piece/tree.js +++ b/src/piece/tree.js @@ -3,9 +3,13 @@ import { Size as NodeSize } from '../node.js' import * as Proof from '../proof.js' export { computeNode } from '../proof.js' +// The value is an unsigned, 32-bit integer that is always numerically greater +// than the highest index in the array. This means our tree can represent a +// piece up to 128 GiB in size. +export const MAX_LEAF_COUNT = 2 ** 32 - 1 + /** - * `newBareTree` allocates that memory needed to construct a tree with a - * specific amount of leafs. + * Allocates a tree for a given amount of leafs. * * The construction rounds the amount of leafs up to the nearest two-power with * zeroed nodes to ensure that the tree is perfect and hence all internal node's @@ -13,25 +17,29 @@ export { computeNode } from '../proof.js' * * @param {number} leafs */ -export function newBareTree(leafs) { - const adjustedLeafs = 1 << Math.ceil(Math.log2(leafs)) - /** @type {API.TreeData} */ - const tree = { - nodes: new Array(Math.ceil(Math.log2(adjustedLeafs)) + 1), - leafs: leafs, +export function allocate(leafs) { + const adjustedLeafs = 2 ** Math.ceil(Math.log2(leafs)) + + if (adjustedLeafs > MAX_LEAF_COUNT) { + throw new RangeError( + `too many leafs ${adjustedLeafs} exceeds ${MAX_LEAF_COUNT} limit` + ) } - for (const level of tree.nodes.keys()) { - tree.nodes[level] = new Array(1 << level) + const height = Math.ceil(Math.log2(adjustedLeafs)) + const nodes = new Array(height + 1) + + for (const level of nodes.keys()) { + nodes[level] = new Array(1 << level) } - return tree + return new PieceTree({ nodes, height }) } /** * @param 
{API.TreeData} tree */ -export const depth = (tree) => { +const depth = (tree) => { return tree.nodes.length } @@ -62,26 +70,26 @@ export const split = (source) => { /** * @param {API.Fr23Padded} source */ -export const build = (source) => buildFromChunks(split(source)) +export const build = (source) => fromChunks(split(source)) /** * @param {API.MerkleTreeNode[]} chunks */ -export const buildFromChunks = (chunks) => { +export const fromChunks = (chunks) => { if (chunks.length === 0) { throw new RangeError('Empty source') } const leafs = chunks //await Promise.all(chunks.map(truncatedHash)) - return buildFromLeafs(leafs) + return fromLeafs(leafs) } /** * @param {API.MerkleTreeNode[]} leafs * @returns {API.PieceTree} */ -export const buildFromLeafs = (leafs) => { - const tree = newBareTree(leafs.length) +export const fromLeafs = (leafs) => { + const tree = allocate(leafs.length) // Set the padded leaf nodes tree.nodes[depth(tree) - 1] = padLeafs(leafs) let parentNodes = tree.nodes[depth(tree) - 1] @@ -116,26 +124,29 @@ export const padLeafs = (leafs) => { return [...leafs, ...paddingLeafs] } +/** + * @implements {API.PieceTree} + */ class PieceTree { /** - * @param {API.TreeData} model + * @param {object} data + * @param {API.MerkleTreeNode[][]} data.nodes + * @param {number} data.height */ - constructor(model) { - this.model = model + constructor({ nodes, height }) { + this.nodes = nodes + this.height = height } get root() { - return root(this.model) - } - get depth() { - return depth(this.model) + return root(this) } get leafs() { - const { nodes } = this.model + const { nodes } = this return nodes[nodes.length - 1] } get leafCount() { - return this.model.leafs + return 2 ** this.height } /** * @@ -143,7 +154,7 @@ class PieceTree { * @param {number} index */ node(level, index) { - const { nodes } = this.model + const { nodes } = this return nodes[level][index] } } diff --git a/src/piece/unpadded-size.js b/src/piece/unpadded-size.js index c0058b7..dce3c6f 100644 
--- a/src/piece/unpadded-size.js +++ b/src/piece/unpadded-size.js @@ -1,5 +1,8 @@ import * as API from '../api.js' -import { trailingZeros64 } from '../uint64.js' +import { trailingZeros64, log2Ceil } from '../uint64.js' +import * as Node from '../node.js' + +const NODE_SIZE = BigInt(Node.Size) /** * Validates that given `size` is a valid {@link API.UnpaddedPieceSize} and @@ -60,3 +63,10 @@ export const validate = (size) => { * @returns {API.PaddedPieceSize} */ export const toPaddedSize = (size) => size + size / 127n + +/** + * Calculates the height of the piece tree from unpadded size. + * + * @param {API.UnpaddedPieceSize} size + */ +export const toHeight = (size) => log2Ceil(toPaddedSize(size) / NODE_SIZE) diff --git a/test/aggregate-tree.spec.js b/test/aggregate-tree.spec.js index 4972be2..54e5e33 100644 --- a/test/aggregate-tree.spec.js +++ b/test/aggregate-tree.spec.js @@ -11,7 +11,7 @@ import * as Proof from '../src/proof.js' */ export const testAggregateTree = { 'basic aggregate tree test': async (assert) => { - const piece = PieceTree.buildFromLeafs([ + const piece = PieceTree.fromLeafs([ Node.of(0x1), Node.empty(), Node.empty(), @@ -32,11 +32,11 @@ export const testAggregateTree = { assert.deepEqual(aggregate.leafCount, BigInt(piece.leafCount)) // depth is the same - assert.deepEqual(aggregate.depth, piece.depth) + assert.deepEqual(aggregate.height, piece.height) }, 'aggregate tree with left padding': async (assert) => { - const piece = PieceTree.buildFromLeafs([ + const piece = PieceTree.fromLeafs([ Node.empty(), Node.empty(), Node.empty(), @@ -162,7 +162,7 @@ export const testAggregateTree = { 'hybrid with 0 leafs': (assert) => { const hybrid = AggregateTree.create(0) - assert.deepEqual(hybrid.depth, 1) + assert.deepEqual(hybrid.height, 0) assert.deepEqual(hybrid.root, Node.empty()) assert.throws(() => hybrid.node(61, 0n), /level too high/) @@ -213,14 +213,14 @@ export const testAggregateTree = { }, 'can clear tree': (assert) => { - const nonEmpty = 
PieceTree.buildFromLeafs([ + const nonEmpty = PieceTree.fromLeafs([ Node.empty(), Node.empty(), Node.empty(), Node.of(0x1), ]) - const empty = PieceTree.buildFromLeafs([ + const empty = PieceTree.fromLeafs([ Node.empty(), Node.empty(), Node.empty(), diff --git a/test/aggregate.spec.js b/test/aggregate.spec.js index 6129e02..87fe5cd 100644 --- a/test/aggregate.spec.js +++ b/test/aggregate.spec.js @@ -2,6 +2,8 @@ import * as Aggregate from '../src/aggregate.js' import * as Dataset from './piece/vector.js' import * as Piece from '../src/piece.js' import * as Link from 'multiformats/link' +import * as Node from '../src/node.js' +import * as API from '../src/api.js' /** * @type {import("entail").Suite} @@ -26,7 +28,7 @@ export const testAggregate = { Link.parse( 'baga6ea4seaqao7s73y24kcutaosvacpdjgfe5pw76ooefnyqw4ynr3d2y6x2mpq' ), - build.link() + build.link ) }, @@ -35,14 +37,12 @@ export const testAggregate = { size: Piece.PaddedSize.from(1 << 20), }) - const piece = { - root: Link.parse( + builder.write({ + link: Link.parse( 'baga6ea4seaqae5ysjdbsr4b5jhotaz5ooh62jrrdbxwygfpkkfjz44kvywycmgy' - ).multihash.digest, + ), size: Piece.UnpaddedSize.toPaddedSize(Piece.UnpaddedSize.from(520192)), - } - - builder.write(piece) + }) const build = builder.build() @@ -50,7 +50,7 @@ export const testAggregate = { Link.parse( 'baga6ea4seaqko3i6w4rij37dqerctuv4kbakbcylpe6weeu3tjp26fqyd6txcjy' ).toString(), - build.link().toString() + build.link.toString() ) }, 'basic with two pieces': async (assert) => { @@ -58,26 +58,32 @@ export const testAggregate = { size: Aggregate.PaddedSize.from(1 << 20), }) - builder.write({ - root: Link.parse( - 'baga6ea4seaqae5ysjdbsr4b5jhotaz5ooh62jrrdbxwygfpkkfjz44kvywycmgy' - ).multihash.digest, - size: Piece.UnpaddedSize.toPaddedSize(Piece.UnpaddedSize.from(520192)), - }) + builder.write( + Piece.fromString(`{ + "link": { "/": "baga6ea4seaqae5ysjdbsr4b5jhotaz5ooh62jrrdbxwygfpkkfjz44kvywycmgy" }, + "height": 
${Piece.UnpaddedSize.toHeight(Piece.UnpaddedSize.from(520192))} + }`) + ) assert.deepEqual( Link.parse( 'baga6ea4seaqko3i6w4rij37dqerctuv4kbakbcylpe6weeu3tjp26fqyd6txcjy' ), - builder.build().link() + builder.build().link ) - builder.write({ - root: Link.parse( - 'baga6ea4seaqnrm2n2g4m23t6rs26obxjw2tjtr7tcho24gepj2naqhevytduyoa' - ).multihash.digest, - size: Piece.UnpaddedSize.toPaddedSize(Piece.UnpaddedSize.from(260096)), - }) + builder.write( + Piece.fromJSON( + Piece.toJSON({ + link: Link.parse( + 'baga6ea4seaqnrm2n2g4m23t6rs26obxjw2tjtr7tcho24gepj2naqhevytduyoa' + ), + size: Piece.UnpaddedSize.toPaddedSize( + Piece.UnpaddedSize.from(260096) + ), + }) + ) + ) const build = builder.build() @@ -85,7 +91,7 @@ export const testAggregate = { Link.parse( 'baga6ea4seaqnqkeoqevjjjfe46wo2lpfclcbmkyms4wkz5srou3vzmr3w3c72bq' ), - build.link() + build.link ) assert.equal(build.size, 1n << 20n) @@ -96,8 +102,8 @@ export const testAggregate = { assert.deepEqual( JSON.stringify(build), JSON.stringify({ - link: build.link(), - size: 1 << 20, + link: build.link, + height: Math.log2((1 << 20) / Node.Size), }) ) }, @@ -109,16 +115,16 @@ export const testAggregate = { builder.write({ size: Piece.PaddedSize.from(131072), - root: Link.parse( + link: Link.parse( `baga6ea4seaqievout3bskdb76gzldeidkhxo6z5zjrnl2jruvwfwvr2uvvpuwdi` - ).multihash.digest, + ), }) const estimate = builder.estimate({ size: Piece.PaddedSize.from(524288), - root: Link.parse( + link: Link.parse( `baga6ea4seaqkzsosscjqdegbhqrlequtm7pbjscwpeqwhrd53cxov5td34vfojy` - ).multihash.digest, + ), }) assert.match(estimate.error, /Pieces are too large to fit/) @@ -127,9 +133,9 @@ export const testAggregate = { () => builder.write({ size: Piece.PaddedSize.from(524288), - root: Link.parse( + link: Link.parse( `baga6ea4seaqkzsosscjqdegbhqrlequtm7pbjscwpeqwhrd53cxov5td34vfojy` - ).multihash.digest, + ), }), /Pieces are too large to fit in the index/ ) @@ -142,16 +148,16 @@ export const testAggregate = { builder.write({ size: 
Piece.PaddedSize.from(131072), - root: Link.parse( + link: Link.parse( `baga6ea4seaqievout3bskdb76gzldeidkhxo6z5zjrnl2jruvwfwvr2uvvpuwdi` - ).multihash.digest, + ), }) const estimate = builder.estimate({ size: Piece.PaddedSize.from(524288) + 1n, - root: Link.parse( + link: Link.parse( `baga6ea4seaqkzsosscjqdegbhqrlequtm7pbjscwpeqwhrd53cxov5td34vfojy` - ).multihash.digest, + ), }) assert.match(estimate.error, /padded piece size must be a power of 2/) @@ -173,40 +179,41 @@ export const testAggregate = { Link.parse( 'baga6ea4seaqd6rv4mrnqpi7kfqcpazxzhho7pytj3v3woh46dzq2hi3zpztfcjy' ), - build.link() + build.link ) }, 'fails to write when too many pieces are added': async (assert) => { + /** @type {API.PieceInfo[]} */ const pieces = [ { - root: Link.parse( + link: Link.parse( 'baga6ea4seaqae5ysjdbsr4b5jhotaz5ooh62jrrdbxwygfpkkfjz44kvywycmgy' - ).multihash.digest, + ), size: Piece.PaddedSize.from(1 << 7), }, { - root: Link.parse( + link: Link.parse( 'baga6ea4seaqnrm2n2g4m23t6rs26obxjw2tjtr7tcho24gepj2naqhevytduyoa' - ).multihash.digest, + ), size: Piece.PaddedSize.from(1 << 7), }, { - root: Link.parse( + link: Link.parse( 'baga6ea4seaqa2dqkaeaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' - ).multihash.digest, + ), size: Piece.PaddedSize.from(1 << 7), }, { - root: Link.parse( + link: Link.parse( 'baga6ea4seaqa2dqkaeaacaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' - ).multihash.digest, + ), size: Piece.PaddedSize.from(1 << 7), }, { - root: Link.parse( + link: Link.parse( 'baga6ea4seaqa2dqkaeaagaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' - ).multihash.digest, + ), size: Piece.PaddedSize.from(1 << 7), }, ] @@ -225,17 +232,17 @@ export const testAggregate = { const build = Aggregate.build({ pieces: [ { - root: Link.parse( + link: Link.parse( 'baga6ea4seaqae5ysjdbsr4b5jhotaz5ooh62jrrdbxwygfpkkfjz44kvywycmgy' - ).multihash.digest, + ), size: Piece.UnpaddedSize.toPaddedSize( Piece.UnpaddedSize.from(520192) ), }, { - root: Link.parse( + link: Link.parse( 
'baga6ea4seaqnrm2n2g4m23t6rs26obxjw2tjtr7tcho24gepj2naqhevytduyoa' - ).multihash.digest, + ), size: Piece.UnpaddedSize.toPaddedSize( Piece.UnpaddedSize.from(260096) ), @@ -248,7 +255,7 @@ export const testAggregate = { Link.parse( 'baga6ea4seaqnqkeoqevjjjfe46wo2lpfclcbmkyms4wkz5srou3vzmr3w3c72bq' ), - build.link() + build.link ) assert.equal(build.size, 1n << 20n) @@ -258,8 +265,8 @@ export const testAggregate = { assert.deepEqual( JSON.stringify(build), JSON.stringify({ - link: build.link(), - size: 1 << 20, + link: build.link, + height: Math.log2((1 << 20) / Node.Size), }) ) }, diff --git a/test/lib.spec.js b/test/lib.spec.js index aaa5359..aebdbf2 100644 --- a/test/lib.spec.js +++ b/test/lib.spec.js @@ -6,19 +6,20 @@ import * as Link from 'multiformats/link' */ export const testLib = { 'test aggregate sample': async (assert) => { + /** @type {Lib.PieceInfo[]} */ const pieces = [ { - root: Link.parse( + link: Link.parse( 'baga6ea4seaqae5ysjdbsr4b5jhotaz5ooh62jrrdbxwygfpkkfjz44kvywycmgy' - ).multihash.digest, + ), size: Lib.Piece.UnpaddedSize.toPaddedSize( Lib.Piece.UnpaddedSize.from(520192) ), }, { - root: Link.parse( + link: Link.parse( `baga6ea4seaqnrm2n2g4m23t6rs26obxjw2tjtr7tcho24gepj2naqhevytduyoa` - ).multihash.digest, + ), size: Lib.Piece.UnpaddedSize.toPaddedSize( Lib.Piece.UnpaddedSize.from(260096) ), diff --git a/test/piece-tree.spec.js b/test/piece-tree.spec.js index 37d0127..67632a4 100644 --- a/test/piece-tree.spec.js +++ b/test/piece-tree.spec.js @@ -1,5 +1,4 @@ import { Tree } from '@web3-storage/data-segment' -import { base16 } from 'multiformats/bases/base16' /** * @type {import("entail").Suite} @@ -8,7 +7,7 @@ export const testTree = { 'throws on empty tree': async (assert) => { let result = null try { - result = Tree.buildFromChunks([]) + result = Tree.fromChunks([]) } catch (error) { result = { catch: error } } @@ -18,9 +17,12 @@ export const testTree = { 'builds from chunks': async (assert) => { const tree = await Tree.build(new 
Uint8Array(128)) - assert.equal(tree.depth, 3) + assert.equal(tree.height, 2) assert.equal(tree.leafs.length, 4) assert.equal(tree.node(0, 0), tree.root) assert.equal(tree.leafCount, 4) }, + 'throws when exceeding max leaf count': async (assert) => { + assert.throws(() => Tree.allocate(2 ** 32), /too many leafs/) + }, } diff --git a/test/piece.spec.js b/test/piece.spec.js index 1efcfea..7682d80 100644 --- a/test/piece.spec.js +++ b/test/piece.spec.js @@ -1,4 +1,4 @@ -import { Piece } from '@web3-storage/data-segment' +import { Piece, Node } from '@web3-storage/data-segment' import { deriveBuffer } from './util.js' import * as SHA256 from 'sync-multihash-sha2/sha256' import * as raw from 'multiformats/codecs/raw' @@ -24,7 +24,7 @@ export const testPiece = { } assert.ok( - String(result).includes('not defined for inputs shorter than 65 bytes') + String(result).includes('not defined for payloads smaller than 65 bytes') ) }, ...Object.fromEntries( @@ -32,20 +32,63 @@ export const testPiece = { `${data.in.contentSize}\t\t${data.in.cid}`, async (assert) => { const source = deriveBuffer(data.in.contentSize) - const link = createLink(raw.code, SHA256.digest(raw.encode(source))) + const root = SHA256.digest(raw.encode(source)) + const link = createLink(raw.code, root) const piece = await Piece.build(source) assert.deepEqual(link.toString(), data.in.cid, 'same source content') - assert.deepEqual(piece.root, parseLink(data.out.cid).multihash.digest) + assert.deepEqual( + piece.tree.root, + parseLink(data.out.cid).multihash.digest + ) + assert.deepEqual(parseLink(data.out.cid), piece.link) + assert.deepEqual(piece.size, BigInt(data.out.size)) + assert.deepEqual(piece.height, Math.log2(data.out.size / Node.Size)) + assert.deepEqual(piece.paddedSize, data.out.paddedSize) - assert.deepEqual(piece.toJSON(), { + const json = piece.toJSON() + + assert.deepEqual(json, { link: { '/': data.out.cid, }, - contentSize: data.in.contentSize, - paddedSize: data.out.paddedSize, - size: 
data.out.size, + height: Math.log2(data.out.size / Node.Size), }) + + const view = Piece.fromJSON(json) + assert.deepEqual(view.link, piece.link) + assert.deepEqual(view.size, piece.size) + assert.deepEqual(view.height, piece.height) }, ]) ), + + 'throws if payload is too large': async (assert) => { + // Subclass Uint8Array as we can't actually allocate a buffer this large + class HugePayload extends Uint8Array { + get length() { + return Piece.MAX_PAYLOAD_SIZE + 1 + } + } + + assert.throws( + () => Piece.build(new HugePayload()), + /Payload exceeds maximum supported size/ + ) + }, + + 'toString <-> fromString': async (assert) => { + const source = deriveBuffer(128) + const piece = await Piece.build(source) + + const serialized = piece.toString() + assert.deepEqual(JSON.parse(serialized), { + link: { '/': piece.link.toString() }, + height: piece.height, + }) + + const deserialized = Piece.fromString(serialized) + assert.deepEqual(deserialized.link, piece.link) + assert.deepEqual(deserialized.size, piece.size) + assert.deepEqual(deserialized.height, piece.height) + }, } diff --git a/test/piece/vector.js b/test/piece/vector.js index 323be70..37f366d 100644 --- a/test/piece/vector.js +++ b/test/piece/vector.js @@ -35,5 +35,5 @@ export const createNodeFromInt = (n) => { export const pieces = sizes.map((size, index) => ({ size: Piece.PaddedSize.from(size), - root: createNodeFromInt(index), + link: Piece.createLink(createNodeFromInt(index)), }))