fix: use @ipld/dag-pb instead of ipld-dag-pb
Use the rewrite of the dag-pb codec.
vmx committed Feb 23, 2021
1 parent 9a2b5f2 commit 6f7d6f4
Showing 6 changed files with 67 additions and 29 deletions.
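For context: the old ipld-dag-pb package exposed DAGNode and DAGLink classes with a serialize() method, while the rewritten @ipld/dag-pb codec works on plain objects ({ Data, Links }, with links as plain { Name, Tsize, Hash } objects) and standalone prepare() and encode() functions. A minimal sketch of the new pattern as it is used throughout this diff (serializeDirectory is a hypothetical helper for illustration, not part of the commit):

'use strict'

// Minimal sketch of the new codec usage, mirroring the calls in this diff.
// serializeDirectory is a hypothetical helper, not part of the commit.
const { encode, prepare } = require('@ipld/dag-pb')
const UnixFS = require('ipfs-unixfs')

function serializeDirectory (links = []) {
  // UnixFS metadata goes into Data, child entries into Links.
  // Links are plain { Name: string, Tsize: number, Hash: CID } objects
  // rather than DAGLink instances.
  const unixfs = new UnixFS({ type: 'directory' })
  const node = {
    Data: unixfs.marshal(),
    Links: links
  }

  // prepare() normalises the node shape and encode() returns the bytes that
  // previously came from new DAGNode(unixfs.marshal(), links).serialize().
  return encode(prepare(node))
}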
1 change: 1 addition & 0 deletions packages/ipfs-unixfs-importer/package.json
@@ -42,6 +42,7 @@
     "rimraf": "^3.0.2"
   },
   "dependencies": {
+    "@ipld/dag-pb": "0.0.1",
     "bl": "^4.1.0",
     "cids": "^1.1.5",
     "err-code": "^3.0.1",
8 changes: 5 additions & 3 deletions packages/ipfs-unixfs-importer/src/dag-builder/dir.js
@@ -3,8 +3,10 @@
 const UnixFS = require('ipfs-unixfs')
 const persist = require('../utils/persist')
 const {
-  DAGNode
-} = require('ipld-dag-pb')
+  encode,
+  prepare
+  // @ts-ignore
+} = require('@ipld/dag-pb')
 
 /**
  * @typedef {import('../').Directory} Directory
@@ -20,7 +22,7 @@ const dirBuilder = async (item, block, options) => {
     mode: item.mode
   })
 
-  const buffer = new DAGNode(unixfs.marshal()).serialize()
+  const buffer = encode(prepare({ Data: unixfs.marshal() }))
   const cid = await persist(buffer, block, options)
   const path = item.path
 
8 changes: 5 additions & 3 deletions packages/ipfs-unixfs-importer/src/dag-builder/file/buffer-importer.js
@@ -3,8 +3,10 @@
 const UnixFS = require('ipfs-unixfs')
 const persist = require('../../utils/persist')
 const {
-  DAGNode
-} = require('ipld-dag-pb')
+  encode,
+  prepare
+  // @ts-ignore
+} = require('@ipld/dag-pb')
 
 /**
  * @typedef {import('../../').BlockAPI} BlockAPI
@@ -38,7 +40,7 @@ async function * bufferImporter (file, block, options) {
           mode: file.mode
         })
 
-        buffer = new DAGNode(unixfs.marshal()).serialize()
+        buffer = encode(prepare({ Data: unixfs.marshal() }))
       }
 
       return {
28 changes: 20 additions & 8 deletions packages/ipfs-unixfs-importer/src/dag-builder/file/index.js
@@ -4,9 +4,10 @@ const errCode = require('err-code')
 const UnixFS = require('ipfs-unixfs')
 const persist = require('../../utils/persist')
 const {
-  DAGNode,
-  DAGLink
-} = require('ipld-dag-pb')
+  encode,
+  prepare
+  // @ts-ignore
+} = require('@ipld/dag-pb')
 const all = require('it-all')
 const parallelBatch = require('it-parallel-batch')
 const mh = require('multihashing-async').multihash
@@ -93,7 +94,7 @@ const reduce = (file, block, options) => {
         })
 
         const multihash = mh.decode(leaf.cid.multihash)
-        buffer = new DAGNode(leaf.unixfs.marshal()).serialize()
+        buffer = encode(prepare({ Data: leaf.unixfs.marshal() }))
 
         leaf.cid = await persist(buffer, block, {
           ...options,
@@ -136,7 +137,11 @@ const reduce = (file, block, options) => {
           // node is a leaf buffer
           f.addBlockSize(leaf.size)
 
-          return new DAGLink('', leaf.size, leaf.cid)
+          return {
+            Name: '',
+            Tsize: leaf.size,
+            Hash: leaf.cid
+          }
         }
 
         if (!leaf.unixfs || !leaf.unixfs.data) {
@@ -147,11 +152,18 @@
           f.addBlockSize(leaf.unixfs.data.length)
         }
 
-        return new DAGLink('', leaf.size, leaf.cid)
+        return {
+          Name: '',
+          Tsize: leaf.size,
+          Hash: leaf.cid
+        }
       })
 
-    const node = new DAGNode(f.marshal(), links)
-    const buffer = node.serialize()
+    const node = {
+      Data: f.marshal(),
+      Links: links
+    }
+    const buffer = encode(prepare(node))
     const cid = await persist(buffer, block, options)
 
     return {
19 changes: 12 additions & 7 deletions packages/ipfs-unixfs-importer/src/dir-flat.js
@@ -1,9 +1,10 @@
 'use strict'
 
 const {
-  DAGLink,
-  DAGNode
-} = require('ipld-dag-pb')
+  encode,
+  prepare
+  // @ts-ignore
+} = require('@ipld/dag-pb')
 const UnixFS = require('ipfs-unixfs')
 const Dir = require('./dir')
 const persist = require('./utils/persist')
@@ -92,7 +93,11 @@ class DirFlat extends Dir {
       }
 
       if (child.size != null && child.cid) {
-        links.push(new DAGLink(children[i], child.size, child.cid))
+        links.push({
+          Name: children[i],
+          Tsize: child.size,
+          Hash: child.cid
+        })
       }
     }
 
@@ -102,13 +107,13 @@
       mode: this.mode
     })
 
-    const node = new DAGNode(unixfs.marshal(), links)
-    const buffer = node.serialize()
+    const node = { Data: unixfs.marshal(), Links: links }
+    const buffer = encode(prepare(node))
     const cid = await persist(buffer, block, this.options)
     const size = buffer.length + node.Links.reduce(
       /**
       * @param {number} acc
-      * @param {DAGLink} curr
+      * @param {{ Name: string, Tsize: number, Hash: CID }} curr
       */
      (acc, curr) => acc + curr.Tsize,
      0)
32 changes: 24 additions & 8 deletions packages/ipfs-unixfs-importer/src/dir-sharded.js
@@ -1,9 +1,10 @@
 'use strict'
 
 const {
-  DAGLink,
-  DAGNode
-} = require('ipld-dag-pb')
+  encode,
+  prepare
+  // @ts-ignore
+} = require('@ipld/dag-pb')
 const UnixFS = require('ipfs-unixfs')
 const Dir = require('./dir')
 const persist = require('./utils/persist')
@@ -119,7 +120,11 @@ async function * flush (bucket, block, shardRoot, options) {
         throw new Error('Could not flush sharded directory, no subshard found')
       }
 
-      links.push(new DAGLink(labelPrefix, shard.size, shard.cid))
+      links.push({
+        Name: labelPrefix,
+        Tsize: shard.size,
+        Hash: shard.cid
+      })
       childrenSize += shard.size
     } else if (typeof child.value.flush === 'function') {
       const dir = child.value
@@ -132,7 +137,11 @@
       }
 
       const label = labelPrefix + child.key
-      links.push(new DAGLink(label, flushedDir.size, flushedDir.cid))
+      links.push({
+        Name: label,
+        Tsize: flushedDir.size,
+        Hash: flushedDir.cid
+      })
 
       childrenSize += flushedDir.size
     } else {
@@ -145,7 +154,11 @@
       const label = labelPrefix + child.key
       const size = value.size
 
-      links.push(new DAGLink(label, size, value.cid))
+      links.push({
+        Name: label,
+        Tsize: size,
+        Hash: value.cid
+      })
       childrenSize += size
     }
   }
@@ -162,8 +175,11 @@
     mode: shardRoot && shardRoot.mode
   })
 
-  const node = new DAGNode(dir.marshal(), links)
-  const buffer = node.serialize()
+  const node = {
+    Data: dir.marshal(),
+    Links: links
+  }
+  const buffer = encode(prepare(node))
   const cid = await persist(buffer, block, options)
   const size = buffer.length + childrenSize
 
