diff --git a/packages/ipfs-unixfs-importer/package.json b/packages/ipfs-unixfs-importer/package.json
index 4e441d20..6046330b 100644
--- a/packages/ipfs-unixfs-importer/package.json
+++ b/packages/ipfs-unixfs-importer/package.json
@@ -42,6 +42,7 @@
     "rimraf": "^3.0.2"
   },
   "dependencies": {
+    "@ipld/dag-pb": "0.0.1",
     "bl": "^4.1.0",
     "cids": "^1.1.5",
     "err-code": "^3.0.1",
diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/dir.js b/packages/ipfs-unixfs-importer/src/dag-builder/dir.js
index 9541698e..918d6f65 100644
--- a/packages/ipfs-unixfs-importer/src/dag-builder/dir.js
+++ b/packages/ipfs-unixfs-importer/src/dag-builder/dir.js
@@ -3,8 +3,10 @@
 const UnixFS = require('ipfs-unixfs')
 const persist = require('../utils/persist')
 const {
-  DAGNode
-} = require('ipld-dag-pb')
+  encode,
+  prepare
+// @ts-ignore
+} = require('@ipld/dag-pb')
 
 /**
  * @typedef {import('../').Directory} Directory
@@ -20,7 +22,7 @@ const dirBuilder = async (item, block, options) => {
     mode: item.mode
   })
 
-  const buffer = new DAGNode(unixfs.marshal()).serialize()
+  const buffer = encode(prepare({ Data: unixfs.marshal() }))
   const cid = await persist(buffer, block, options)
   const path = item.path
 
diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js
index e01df504..9dd1b62c 100644
--- a/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js
+++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js
@@ -3,8 +3,10 @@
 const UnixFS = require('ipfs-unixfs')
 const persist = require('../../utils/persist')
 const {
-  DAGNode
-} = require('ipld-dag-pb')
+  encode,
+  prepare
+// @ts-ignore
+} = require('@ipld/dag-pb')
 
 /**
  * @typedef {import('../../').BlockAPI} BlockAPI
@@ -38,7 +40,7 @@ async function * bufferImporter (file, block, options) {
           mode: file.mode
         })
 
-        buffer = new DAGNode(unixfs.marshal()).serialize()
+        buffer = encode(prepare({ Data: unixfs.marshal() }))
       }
 
       return {
diff --git a/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js b/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js
index c4ce9892..d060f9ce 100644
--- a/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js
+++ b/packages/ipfs-unixfs-importer/src/dag-builder/file/index.js
@@ -4,9 +4,10 @@ const errCode = require('err-code')
 const UnixFS = require('ipfs-unixfs')
 const persist = require('../../utils/persist')
 const {
-  DAGNode,
-  DAGLink
-} = require('ipld-dag-pb')
+  encode,
+  prepare
+// @ts-ignore
+} = require('@ipld/dag-pb')
 const all = require('it-all')
 const parallelBatch = require('it-parallel-batch')
 const mh = require('multihashing-async').multihash
@@ -93,7 +94,7 @@ const reduce = (file, block, options) => {
         })
 
         const multihash = mh.decode(leaf.cid.multihash)
-        buffer = new DAGNode(leaf.unixfs.marshal()).serialize()
+        buffer = encode(prepare({ Data: leaf.unixfs.marshal() }))
 
         leaf.cid = await persist(buffer, block, {
           ...options,
@@ -136,7 +137,11 @@
         // node is a leaf buffer
 
         f.addBlockSize(leaf.size)
-        return new DAGLink('', leaf.size, leaf.cid)
+        return {
+          Name: '',
+          Tsize: leaf.size,
+          Hash: leaf.cid
+        }
       }
 
       if (!leaf.unixfs || !leaf.unixfs.data) {
@@ -147,11 +152,18 @@
         f.addBlockSize(leaf.unixfs.data.length)
       }
 
-      return new DAGLink('', leaf.size, leaf.cid)
+      return {
+        Name: '',
+        Tsize: leaf.size,
+        Hash: leaf.cid
+      }
     })
 
-    const node = new DAGNode(f.marshal(), links)
-    const buffer = node.serialize()
+    const node = {
+      Data: f.marshal(),
+      Links: links
+    }
+    const buffer = encode(prepare(node))
     const cid = await persist(buffer, block, options)
 
     return {
diff --git a/packages/ipfs-unixfs-importer/src/dir-flat.js b/packages/ipfs-unixfs-importer/src/dir-flat.js
index e292f8e8..d3904f43 100644
--- a/packages/ipfs-unixfs-importer/src/dir-flat.js
+++ b/packages/ipfs-unixfs-importer/src/dir-flat.js
@@ -1,9 +1,10 @@
 'use strict'
 
 const {
-  DAGLink,
-  DAGNode
-} = require('ipld-dag-pb')
+  encode,
+  prepare
+// @ts-ignore
+} = require('@ipld/dag-pb')
 const UnixFS = require('ipfs-unixfs')
 const Dir = require('./dir')
 const persist = require('./utils/persist')
@@ -92,7 +93,11 @@
       }
 
       if (child.size != null && child.cid) {
-        links.push(new DAGLink(children[i], child.size, child.cid))
+        links.push({
+          Name: children[i],
+          Tsize: child.size,
+          Hash: child.cid
+        })
       }
     }
 
@@ -102,13 +107,13 @@
       mode: this.mode
     })
 
-    const node = new DAGNode(unixfs.marshal(), links)
-    const buffer = node.serialize()
+    const node = { Data: unixfs.marshal(), Links: links }
+    const buffer = encode(prepare(node))
     const cid = await persist(buffer, block, this.options)
     const size = buffer.length + node.Links.reduce(
       /**
        * @param {number} acc
-       * @param {DAGLink} curr
+       * @param {{ Name: string, Tsize: number, Hash: CID }} curr
        */
       (acc, curr) => acc + curr.Tsize, 0)
 
diff --git a/packages/ipfs-unixfs-importer/src/dir-sharded.js b/packages/ipfs-unixfs-importer/src/dir-sharded.js
index 4e9277a1..2d03344a 100644
--- a/packages/ipfs-unixfs-importer/src/dir-sharded.js
+++ b/packages/ipfs-unixfs-importer/src/dir-sharded.js
@@ -1,9 +1,10 @@
 'use strict'
 
 const {
-  DAGLink,
-  DAGNode
-} = require('ipld-dag-pb')
+  encode,
+  prepare
+// @ts-ignore
+} = require('@ipld/dag-pb')
 const UnixFS = require('ipfs-unixfs')
 const Dir = require('./dir')
 const persist = require('./utils/persist')
@@ -119,7 +120,11 @@ async function * flush (bucket, block, shardRoot, options) {
       throw new Error('Could not flush sharded directory, no subshard found')
     }
 
-    links.push(new DAGLink(labelPrefix, shard.size, shard.cid))
+    links.push({
+      Name: labelPrefix,
+      Tsize: shard.size,
+      Hash: shard.cid
+    })
     childrenSize += shard.size
   } else if (typeof child.value.flush === 'function') {
     const dir = child.value
@@ -132,7 +137,11 @@
       }
 
       const label = labelPrefix + child.key
-      links.push(new DAGLink(label, flushedDir.size, flushedDir.cid))
+      links.push({
+        Name: label,
+        Tsize: flushedDir.size,
+        Hash: flushedDir.cid
+      })
 
       childrenSize += flushedDir.size
     } else {
@@ -145,7 +154,11 @@
       const label = labelPrefix + child.key
       const size = value.size
 
-      links.push(new DAGLink(label, size, value.cid))
+      links.push({
+        Name: label,
+        Tsize: size,
+        Hash: value.cid
+      })
       childrenSize += size
     }
   }
@@ -162,8 +175,11 @@
     mode: shardRoot && shardRoot.mode
   })
 
-  const node = new DAGNode(dir.marshal(), links)
-  const buffer = node.serialize()
+  const node = {
+    Data: dir.marshal(),
+    Links: links
+  }
+  const buffer = encode(prepare(node))
   const cid = await persist(buffer, block, options)
   const size = buffer.length + childrenSize
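For reviewers, here is a minimal standalone sketch of the encoding flow this diff adopts. It assumes the `encode` and `prepare` exports used above, plus a `decode` export (present in later `@ipld/dag-pb` releases) used only to round-trip the block; the values are illustrative and not part of this change.

```js
'use strict'

const {
  encode,
  prepare,
  decode // assumed export, used here only to verify the round-trip
// @ts-ignore
} = require('@ipld/dag-pb')
const UnixFS = require('ipfs-unixfs')

// Build UnixFS directory metadata the same way dag-builder/dir.js now does
const unixfs = new UnixFS({ type: 'directory' })

// Nodes and links are plain objects now: { Data, Links } and
// { Name, Tsize, Hash } replace the DAGNode/DAGLink classes.
// prepare() normalizes the node shape, encode() serializes it to bytes,
// together replacing `new DAGNode(data, links).serialize()`.
const node = prepare({
  Data: unixfs.marshal(),
  Links: []
})
const buffer = encode(node)

// Decode to confirm we produced a well-formed dag-pb block
const decoded = decode(buffer)
console.log(decoded.Links) // []
```

Note that `encode` returns a `Uint8Array`, so the call sites above can keep handing `buffer` to `persist` and measuring it with `buffer.length` unchanged.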