diff --git a/.github/workflows/bundlesize.yml b/.github/workflows/bundlesize.yml new file mode 100644 index 0000000000..6c298295c6 --- /dev/null +++ b/.github/workflows/bundlesize.yml @@ -0,0 +1,39 @@ +# Checks bundlesize, does not run on master +on: + pull_request: + branches: + - '*' + +name: Bundlesize +jobs: + check: + runs-on: ubuntu-latest + strategy: + matrix: + node-version: [12.x] + project: + - packages/ipfs-core-utils + - packages/ipfs-core + - packages/ipfs-http-client + - packages/ipfs + - packages/ipfs-message-port-protocol + - packages/ipfs-message-port-client + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - name: Restore dependencies + id: cache-modules + uses: actions/cache@v2 + with: + path: node_modules + key: ${{ matrix.node-version }}-${{ runner.OS }}-build-${{ hashFiles('package.json') }} + - name: Install dependencies + run: npm install + - name: Bundlesize ${{ matrix.project }} + uses: ipfs/aegir/actions/bundle-size@v28.0.0 + with: + project: ${{ matrix.project }} + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml deleted file mode 100644 index a89bde0263..0000000000 --- a/.github/workflows/main.yml +++ /dev/null @@ -1,18 +0,0 @@ -# Checks bundlesize, does not run on master -name: Bundlesize - -on: - push: - branches: - - '*' - - '!master' - -jobs: - checks: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - run: npm i - - uses: ipfs/aegir/actions/bundle-size@master - with: - github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/typecheck.yml b/.github/workflows/typecheck.yml new file mode 100644 index 0000000000..7ea91b157b --- /dev/null +++ b/.github/workflows/typecheck.yml @@ -0,0 +1,42 @@ +on: + pull_request: + branches: + - '*' + +name: Typecheck +jobs: + check: + runs-on: ubuntu-latest + strategy: + matrix: + node-version: [12.x] + project: + - packages/ipfs-core-utils + - packages/ipfs-core + - packages/ipfs-http-client + - packages/ipfs + - packages/ipfs-cli + - packages/ipfs-http-server + - packages/ipfs-http-gateway + - packages/ipfs-message-port-protocol + - packages/ipfs-message-port-server + - packages/ipfs-message-port-client + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - name: Restore dependencies + id: cache-modules + uses: actions/cache@v2 + with: + path: node_modules + key: ${{ matrix.node-version }}-${{ runner.OS }}-build-${{ hashFiles('package.json') }} + - name: Install dependencies + run: npm install + - name: Typecheck ${{ matrix.project }} + uses: gozala/typescript-error-reporter-action@v1.0.8 + with: + project: ${{ matrix.project }} + diff --git a/.travis.yml b/.travis.yml index 6c7849fe50..21cf72f161 100644 --- a/.travis.yml +++ b/.travis.yml @@ -229,12 +229,13 @@ jobs: - stage: test name: examples script: - - npx json -I -f ./lerna.json -e "this.packages.push('examples/*')" - - npx json -I -f ./lerna.json -e "this.command.bootstrap.nohoist = ['ipfs-css', 'tachyons']" - - npm run reset - - npm install - - npm run build -- --scope={ipfs,ipfs-http-client,ipfs-message-port-*} - - npm run test -- --scope=example* --concurrency=1 + # Travis lets scripts continue even if previous steps fail: https://github.com/travis-ci/travis-ci/issues/1066 - npm run build -- 
--scope={ipfs-core,ipfs,ipfs-http-client,ipfs-message-port-*} && + rm -rf node_modules packages/*/node_modules && + npx json -I -f ./lerna.json -e "this.packages.push('examples/*')" && + npx json -I -f ./lerna.json -e "this.command.bootstrap.nohoist = ['ipfs-css', 'tachyons']" && + npm install && + npm run test -- --scope=example* --concurrency=1 - stage: release-rc # only run on changes to master diff --git a/docs/core-api/BITSWAP.md b/docs/core-api/BITSWAP.md index d903cfe94d..786f19ace4 100644 --- a/docs/core-api/BITSWAP.md +++ b/docs/core-api/BITSWAP.md @@ -62,7 +62,7 @@ A great source of [examples][] can be found in the tests for this API. | Name | Type | Default | Description | | ---- | ---- | ------- | ----------- | -| peerId | [PeerId][], [CID][], `String` or `Buffer` | A peer ID to return the wantlist for | +| peerId | [PeerId][], [CID][], `String` or `Uint8Array` | A peer ID to return the wantlist for | ### Options diff --git a/docs/core-api/BLOCK.md b/docs/core-api/BLOCK.md index aa80b6821f..e0f54f1e85 100644 --- a/docs/core-api/BLOCK.md +++ b/docs/core-api/BLOCK.md @@ -29,7 +29,7 @@ | Name | Type | Description | | ---- | ---- | ----------- | -| cid | [CID][], `String` or `Buffer` | A CID that corresponds to the desired block | +| cid | [CID][], `String` or `Uint8Array` | A CID that corresponds to the desired block | ### Options @@ -63,7 +63,7 @@ A great source of [examples][] can be found in the tests for this API. | Name | Type | Description | | ---- | ---- | ----------- | -| block | A `Buffer` or [Block][] instance | The block or data to store | +| block | A `Uint8Array` or [Block][] instance | The block or data to store | ### Options diff --git a/docs/core-api/BOOTSTRAP.md b/docs/core-api/BOOTSTRAP.md index dca8cf600a..c215a27b2a 100644 --- a/docs/core-api/BOOTSTRAP.md +++ b/docs/core-api/BOOTSTRAP.md @@ -55,7 +55,7 @@ An optional object which may have the following keys: | Type | Description | | -------- | -------- | -| `Promise` | An object that contains an array with all the added addresses | +| `Promise<{ Peers: Array }>` | An object that contains an array with all the added addresses | example of the returned object: @@ -141,7 +141,7 @@ An optional object which may have the following keys: | Type | Description | | -------- | -------- | -| `Promise` | An object that contains an array with all the bootstrap addresses | +| `Promise<{ Peers: Array }>` | An object that contains an array with all the bootstrap addresses | example of the returned object: @@ -225,7 +225,7 @@ An optional object which may have the following keys: | Type | Description | | -------- | -------- | -| `Promise` | An object that contains an array with all the removed addresses | +| `Promise<{ Peers: Array }>` | An object that contains an array with all the removed addresses | ```JavaScript { diff --git a/docs/core-api/DHT.md b/docs/core-api/DHT.md index a35181541e..e27c88d6e5 100644 --- a/docs/core-api/DHT.md +++ b/docs/core-api/DHT.md @@ -126,7 +126,7 @@ A great source of [examples][] can be found in the tests for this API. 
| Name | Type | Description | | ---- | ---- | ----------- | -| key | `Buffer` or `string` | The key associated with the value to find | +| key | `Uint8Array` or `string` | The key associated with the value to find | ### Options @@ -141,7 +141,7 @@ An optional object which may have the following keys: | Type | Description | | -------- | -------- | -| `Promise` | The value that was stored under that key | +| `Promise` | The value that was stored under that key | ### Example @@ -228,8 +228,8 @@ A great source of [examples][] can be found in the tests for this API. | Name | Type | Description | | ---- | ---- | ----------- | -| key | Buffer | The key to put the value as | -| value | Buffer | Value to put | +| key | Uint8Array | The key to put the value as | +| value | Uint8Array | Value to put | ### Options diff --git a/docs/core-api/FILES.md b/docs/core-api/FILES.md index 5898c9aba2..0e18c0faa5 100644 --- a/docs/core-api/FILES.md +++ b/docs/core-api/FILES.md @@ -242,8 +242,8 @@ An optional object which may have the following keys: | Name | Type | Default | Description | | ---- | ---- | ------- | ----------- | -| chunker | `String` | `'size-262144'` | chunking algorithm used to build ipfs DAGs | -| cidVersion | `Number` | `0` | the CID version to use when storing the data | +| chunker | `string` | `'size-262144'` | chunking algorithm used to build ipfs DAGs | +| cidVersion | `number` | `0` | the CID version to use when storing the data | | enableShardingExperiment | `boolean` | `false` | allows to create directories with an unlimited number of entries currently size of unixfs directories is limited by the maximum block size. Note that this is an experimental feature | | hashAlg | `String` | `'sha2-256'` | multihash hashing algorithm to use | | onlyHash | `boolean` | `false` | If true, will not add blocks to the blockstore | @@ -415,7 +415,7 @@ An optional object which may have the following keys: | Type | Description | | -------- | -------- | -| `AsyncIterable` | An async iterable that yields [`Buffer`][b] objects with the contents of `path` | +| `AsyncIterable` | An async iterable that yields `Uint8Array` objects with the contents of `path` | #### Example @@ -842,7 +842,7 @@ An optional object which may have the following keys: | Type | Description | | -------- | -------- | -| `AsyncIterable` | An async iterable that yields [`Buffer`][b] objects with the contents of `path` | +| `AsyncIterable` | An async iterable that yields `Uint8Array` objects with the contents of `path` | #### Example @@ -866,7 +866,7 @@ console.log(uint8ArrayConcat(chunks).toString()) | Name | Type | Description | | ---- | ---- | ----------- | | path | `String` | The [MFS path] where you will write to | -| content | `String`, `Buffer`, `AsyncIterable` or [`Blob`][blob] | The content to write to the path | +| content | `String`, `Uint8Array`, `AsyncIterable` or [`Blob`][blob] | The content to write to the path | #### Options @@ -902,7 +902,7 @@ await ipfs.files.write('/hello-world', new TextEncoder().encode('Hello, world!') ### `ipfs.files.mv(...from, to, [options])` -> Move files from one location to another#### Parameters +> Move files from one location to another #### Parameters diff --git a/docs/core-api/OBJECT.md b/docs/core-api/OBJECT.md index faff11c532..3f1fdc011f 100644 --- a/docs/core-api/OBJECT.md +++ b/docs/core-api/OBJECT.md @@ -100,7 +100,7 @@ A great source of [examples][] can be found in the tests for this API. 
| Name | Type | Description | | ---- | ---- | ----------- | -| obj | `Object{ Data: , Links: [] }`, `Buffer` or [DAGNode][] | The MerkleDAG Node to be stored | +| obj | `Object{ Data: , Links: [] }`, `Uint8Array` or [DAGNode][] | The MerkleDAG Node to be stored | ### Options @@ -108,7 +108,7 @@ An optional object which may have the following keys: | Name | Type | Default | Description | | ---- | ---- | ------- | ----------- | -| enc | `String` | `undefined` | The encoding of the Buffer (json, yml, etc), if passed a Buffer | +| enc | `String` | `undefined` | The encoding of the Uint8Array (json, yml, etc), if passed a Uint8Array | | timeout | `Number` | `undefined` | A timeout in ms | | signal | [AbortSignal][] | `undefined` | Can be used to cancel any long running requests started as a result of this call | @@ -195,7 +195,7 @@ An optional object which may have the following keys: | Type | Description | | -------- | -------- | -| `Promise` | An Promise that resolves to Buffer objects with the data that the MerkleDAG node contained | +| `Promise` | An Promise that resolves to Uint8Array objects with the data that the MerkleDAG node contained | ### Example @@ -431,7 +431,7 @@ A great source of [examples][] can be found in the tests for this API. | Name | Type | Description | | ---- | ---- | ----------- | | cid | [CID][] | Add data to the [DAGNode][] that corresponds to this CID | -| data | `Buffer` | The data to append to the `.Data` field of the node | +| data | `Uint8Array` | The data to append to the `.Data` field of the node | ### Options @@ -465,7 +465,7 @@ A great source of [examples][] can be found in the tests for this API. | Name | Type | Description | | ---- | ---- | ----------- | | cid | [CID][] | Replace data of the [DAGNode][] that corresponds to this CID | -| data | `Buffer` | The data to overwrite with | +| data | `Uint8Array` | The data to overwrite with | ### Options diff --git a/docs/core-api/PUBSUB.md b/docs/core-api/PUBSUB.md index cd4a62bf36..4778181e6b 100644 --- a/docs/core-api/PUBSUB.md +++ b/docs/core-api/PUBSUB.md @@ -32,7 +32,7 @@ | Name | Type | Description | | ---- | ---- | ----------- | | topic | `String` | The topic name | -| handler | `Function<(msg) => {}>` | Event handler which will be called with a message object everytime one is received. The `msg` has the format `{from: String, seqno: Buffer, data: Buffer, topicIDs: Array}` | +| handler | `Function<(msg) => {}>` | Event handler which will be called with a message object everytime one is received. The `msg` has the format `{from: String, seqno: Uint8Array, data: Uint8Array, topicIDs: Array}` | ### Options @@ -125,7 +125,7 @@ If **only** the `topic` param is provided, unsubscribe will remove **all** handl > Publish a data message to a pubsub topic. 
- `topic: String` -- `data: Buffer|String` - The message to send +- `data: Uint8Array|String` - The message to send ### Returns diff --git a/examples/browser-add-readable-stream/index.js b/examples/browser-add-readable-stream/index.js index 6db3128f7b..4c870f2193 100644 --- a/examples/browser-add-readable-stream/index.js +++ b/examples/browser-add-readable-stream/index.js @@ -27,7 +27,7 @@ const createFiles = (directory) => { return [{ path: `${directory}/file1.txt`, - // content could be a stream, a url, a Buffer, a File etc + // content could be a stream, a url, a Uint8Array, a File etc content: 'one' }, { path: `${directory}/file2.txt`, diff --git a/examples/browser-ipns-publish/index.js b/examples/browser-ipns-publish/index.js index 7e916c5940..e0e5ef72d2 100644 --- a/examples/browser-ipns-publish/index.js +++ b/examples/browser-ipns-publish/index.js @@ -7,6 +7,7 @@ const pRetry = require("p-retry"); const last = require("it-last"); const cryptoKeys = require("human-crypto-keys"); // { getKeyPairFromSeed } const uint8ArrayToString = require('uint8arrays/to-string') +const uint8ArrayFromString = require('uint8arrays/from-string') const { sleep, Logger, onEnterPress, catchAndLog } = require("./util"); @@ -142,7 +143,7 @@ async function main() { try { // quick and dirty key gen, don't do this in real life const key = await IPFS.multihashing.digest( - Buffer.from(keyName + Math.random().toString(36).substring(2)), + uint8ArrayFromString(keyName + Math.random().toString(36).substring(2)), "sha2-256" ); const keyPair = await cryptoKeys.getKeyPairFromSeed(key, "rsa"); diff --git a/examples/traverse-ipld-graphs/CHANGELOG.md b/examples/traverse-ipld-graphs/CHANGELOG.md index e91e315784..43971169c9 100644 --- a/examples/traverse-ipld-graphs/CHANGELOG.md +++ b/examples/traverse-ipld-graphs/CHANGELOG.md @@ -87,7 +87,7 @@ Output: ```js { link: { '/': - } } + } } ``` Now: @@ -108,7 +108,7 @@ Output: codec: 'dag-pb', version: 0, multihash: - } } + } } ``` See https://github.com/ipld/ipld/issues/44 for more information on why this diff --git a/package.json b/package.json index 2f2384deb6..979c16ad07 100644 --- a/package.json +++ b/package.json @@ -22,6 +22,7 @@ "test:interface:message-port-client": "lerna run test:interface:message-port-client", "coverage": "lerna run coverage", "build": "lerna run build", + "build:types": "lerna run build:types", "clean": "lerna run clean", "lint": "lerna run lint", "dep-check": "lerna run dep-check", diff --git a/packages/interface-ipfs-core/package.json b/packages/interface-ipfs-core/package.json index dc42af9469..d7c8a82edd 100644 --- a/packages/interface-ipfs-core/package.json +++ b/packages/interface-ipfs-core/package.json @@ -66,7 +66,7 @@ "uint8arrays": "^1.1.0" }, "devDependencies": { - "aegir": "^27.0.0", + "aegir": "^28.0.0", "ipfsd-ctl": "^7.0.2" }, "contributors": [ diff --git a/packages/interface-ipfs-core/src/add.js b/packages/interface-ipfs-core/src/add.js index 2c9dc08f98..fdf27d281e 100644 --- a/packages/interface-ipfs-core/src/add.js +++ b/packages/interface-ipfs-core/src/add.js @@ -80,7 +80,7 @@ module.exports = (common, options) => { expect(fileAdded.cid.toString()).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC') }) - it('should add a Buffer', async () => { + it('should add a Uint8Array', async () => { const file = await ipfs.add(fixtures.smallFile.data) expect(file.cid.toString()).to.equal(fixtures.smallFile.cid) @@ -89,7 +89,7 @@ module.exports = (common, options) => { expect(file.size).greaterThan(fixtures.smallFile.data.length) }) - 
it('should add a BIG Buffer', async () => { + it('should add a BIG Uint8Array', async () => { const file = await ipfs.add(fixtures.bigFile.data) expect(file.cid.toString()).to.equal(fixtures.bigFile.cid) @@ -98,7 +98,7 @@ module.exports = (common, options) => { expect(file.size).greaterThan(fixtures.bigFile.data.length) }) - it('should add a BIG Buffer with progress enabled', async () => { + it('should add a BIG Uint8Array with progress enabled', async () => { let progCalled = false let accumProgress = 0 function handler (p) { @@ -137,7 +137,7 @@ module.exports = (common, options) => { expect(file.path).to.equal(fixtures.emptyFile.cid) }) - it('should add a Buffer as tuple', async () => { + it('should add a Uint8Array as tuple', async () => { const tuple = { path: 'testfile.txt', content: fixtures.smallFile.data } const file = await ipfs.add(tuple) diff --git a/packages/interface-ipfs-core/src/bootstrap/add.js b/packages/interface-ipfs-core/src/bootstrap/add.js index 96416d7348..162483bced 100644 --- a/packages/interface-ipfs-core/src/bootstrap/add.js +++ b/packages/interface-ipfs-core/src/bootstrap/add.js @@ -3,9 +3,10 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') +const Multiaddr = require('multiaddr') const invalidArg = 'this/Is/So/Invalid/' -const validIp4 = '/ip4/104.236.176.52/tcp/4001/p2p/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z' +const validIp4 = new Multiaddr('/ip4/104.236.176.52/tcp/4001/p2p/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -60,13 +61,15 @@ module.exports = (common, options) => { }) it('add a peer to the bootstrap list', async () => { - const peer = '/ip4/111.111.111.111/tcp/1001/p2p/QmXFX2P5ammdmXQgfqGkfswtEVFsZUJ5KeHRXQYCTdiTAb' + const peer = new Multiaddr('/ip4/111.111.111.111/tcp/1001/p2p/QmXFX2P5ammdmXQgfqGkfswtEVFsZUJ5KeHRXQYCTdiTAb') const res = await ipfs.bootstrap.add(peer) expect(res).to.be.eql({ Peers: [peer] }) const list = await ipfs.bootstrap.list() - expect(list.Peers).to.include(peer) + expect(list.Peers).to.deep.include(peer) + + expect(list.Peers.every(ma => Multiaddr.isMultiaddr(ma))).to.be.true() }) }) } diff --git a/packages/interface-ipfs-core/src/bootstrap/clear.js b/packages/interface-ipfs-core/src/bootstrap/clear.js index 2d65561b9c..374eed4194 100644 --- a/packages/interface-ipfs-core/src/bootstrap/clear.js +++ b/packages/interface-ipfs-core/src/bootstrap/clear.js @@ -3,6 +3,7 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') +const Multiaddr = require('multiaddr') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -13,7 +14,7 @@ module.exports = (common, options) => { const describe = getDescribe(options) const it = getIt(options) - const validIp4 = '/ip4/104.236.176.52/tcp/4001/p2p/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z' + const validIp4 = new Multiaddr('/ip4/104.236.176.52/tcp/4001/p2p/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z') describe('.bootstrap.clear', function () { this.timeout(100 * 1000) @@ -51,6 +52,8 @@ module.exports = (common, options) => { const removedPeers = rmRes.Peers expect(removedPeers.sort()).to.deep.equal(addedPeers.sort()) + + expect(removedPeers.every(ma => Multiaddr.isMultiaddr(ma))).to.be.true() }) }) } diff --git a/packages/interface-ipfs-core/src/bootstrap/list.js b/packages/interface-ipfs-core/src/bootstrap/list.js index df8ee91005..868b1191b2 100644 --- 
a/packages/interface-ipfs-core/src/bootstrap/list.js +++ b/packages/interface-ipfs-core/src/bootstrap/list.js @@ -3,6 +3,7 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') +const Multiaddr = require('multiaddr') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -33,6 +34,7 @@ module.exports = (common, options) => { const peers = res.Peers expect(peers).to.be.an('Array') + expect(peers.every(ma => Multiaddr.isMultiaddr(ma))).to.be.true() }) }) } diff --git a/packages/interface-ipfs-core/src/bootstrap/reset.js b/packages/interface-ipfs-core/src/bootstrap/reset.js index 0f34242f85..fe0c78bd7e 100644 --- a/packages/interface-ipfs-core/src/bootstrap/reset.js +++ b/packages/interface-ipfs-core/src/bootstrap/reset.js @@ -3,6 +3,7 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') +const Multiaddr = require('multiaddr') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -45,6 +46,7 @@ module.exports = (common, options) => { const removedPeers = rmRes.Peers expect(removedPeers.sort()).to.deep.equal(addedPeers.sort()) + expect(addedPeers.every(ma => Multiaddr.isMultiaddr(ma))).to.be.true() }) }) } diff --git a/packages/interface-ipfs-core/src/bootstrap/rm.js b/packages/interface-ipfs-core/src/bootstrap/rm.js index 09c099e0b5..ec0b5e59a8 100644 --- a/packages/interface-ipfs-core/src/bootstrap/rm.js +++ b/packages/interface-ipfs-core/src/bootstrap/rm.js @@ -3,6 +3,7 @@ const { getDescribe, getIt, expect } = require('../utils/mocha') const testTimeout = require('../utils/test-timeout') +const Multiaddr = require('multiaddr') /** @typedef { import("ipfsd-ctl/src/factory") } Factory */ /** @@ -14,7 +15,7 @@ module.exports = (common, options) => { const it = getIt(options) const invalidArg = 'this/Is/So/Invalid/' - const validIp4 = '/ip4/104.236.176.52/tcp/4001/p2p/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z' + const validIp4 = new Multiaddr('/ip4/104.236.176.52/tcp/4001/p2p/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z') describe('.bootstrap.rm', function () { this.timeout(100 * 1000) @@ -48,16 +49,17 @@ module.exports = (common, options) => { }) it('removes a peer from the bootstrap list', async () => { - const peer = '/ip4/111.111.111.111/tcp/1001/p2p/QmXFX2P5ammdmXQgfqGkfswtEVFsZUJ5KeHRXQYCTdiTAb' + const peer = new Multiaddr('/ip4/111.111.111.111/tcp/1001/p2p/QmXFX2P5ammdmXQgfqGkfswtEVFsZUJ5KeHRXQYCTdiTAb') await ipfs.bootstrap.add(peer) let list = await ipfs.bootstrap.list() - expect(list.Peers).to.include(peer) + expect(list.Peers).to.deep.include(peer) const res = await ipfs.bootstrap.rm(peer) expect(res).to.be.eql({ Peers: [peer] }) list = await ipfs.bootstrap.list() - expect(list.Peers).to.not.include(peer) + expect(list.Peers).to.not.deep.include(peer) + expect(res.Peers.every(ma => Multiaddr.isMultiaddr(ma))).to.be.true() }) }) } diff --git a/packages/interface-ipfs-core/src/files/read.js b/packages/interface-ipfs-core/src/files/read.js index b33dc0b13a..124cd6aad0 100644 --- a/packages/interface-ipfs-core/src/files/read.js +++ b/packages/interface-ipfs-core/src/files/read.js @@ -73,22 +73,6 @@ module.exports = (common, options) => { expect(bytes).to.deep.equal(data.slice(0, length)) }) - it('reads a file with a legacy count argument', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = randomBytes(100) - const length = 10 - - await ipfs.files.write(path, data, { - create: true - }) - - const 
buffer = uint8ArrayConcat(await all(ipfs.files.read(path, { - count: length - }))) - - expect(buffer).to.deep.equal(data.slice(0, length)) - }) - it('reads a file with an offset and a length', async () => { const path = `/some-file-${Math.random()}.txt` const data = randomBytes(100) @@ -107,24 +91,6 @@ module.exports = (common, options) => { expect(buffer).to.deep.equal(data.slice(offset, offset + length)) }) - it('reads a file with an offset and a legacy count argument', async () => { - const path = `/some-file-${Math.random()}.txt` - const data = randomBytes(100) - const offset = 10 - const length = 10 - - await ipfs.files.write(path, data, { - create: true - }) - - const buffer = uint8ArrayConcat(await all(ipfs.files.read(path, { - offset, - count: length - }))) - - expect(buffer).to.deep.equal(data.slice(offset, offset + length)) - }) - it('refuses to read a directory', async () => { const path = '/' diff --git a/packages/interface-ipfs-core/src/object/get.js b/packages/interface-ipfs-core/src/object/get.js index 7ed9ef4df1..97b4472c3c 100644 --- a/packages/interface-ipfs-core/src/object/get.js +++ b/packages/interface-ipfs-core/src/object/get.js @@ -49,7 +49,7 @@ module.exports = (common, options) => { let node2 = await ipfs.object.get(node1Cid) // because js-ipfs-api can't infer if the - // returned Data is Buffer or String + // returned Data is Uint8Array or String if (typeof node2.Data === 'string') { node2 = new DAGNode(uint8ArrayFromString(node2.Data), node2.Links, node2.size) } @@ -69,7 +69,7 @@ module.exports = (common, options) => { let node2 = await ipfs.object.get(node1Cid.toBaseEncodedString()) // because js-ipfs-api can't infer if the - // returned Data is Buffer or String + // returned Data is Uint8Array or String if (typeof node2.Data === 'string') { node2 = new DAGNode(uint8ArrayFromString(node2.Data), node2.Links, node2.size) } @@ -89,7 +89,7 @@ module.exports = (common, options) => { let node1c = await ipfs.object.get(node1bCid) // because js-ipfs-api can't infer if the - // returned Data is Buffer or String + // returned Data is Uint8Array or String if (typeof node1c.Data === 'string') { node1c = new DAGNode(uint8ArrayFromString(node1c.Data), node1c.Links, node1c.size) } @@ -108,7 +108,7 @@ module.exports = (common, options) => { let node1b = await ipfs.object.get(node1aCid, { enc: 'base58' }) // because js-ipfs-api can't infer if the - // returned Data is Buffer or String + // returned Data is Uint8Array or String if (typeof node1b.Data === 'string') { node1b = new DAGNode(uint8ArrayFromString(node1b.Data), node1b.Links, node1b.size) } @@ -128,7 +128,7 @@ module.exports = (common, options) => { let node1b = await ipfs.object.get(node1aCid.toBaseEncodedString(), { enc: 'base58' }) // because js-ipfs-api can't infer if the - // returned Data is Buffer or String + // returned Data is Uint8Array or String if (typeof node1b.Data === 'string') { node1b = new DAGNode(uint8ArrayFromString(node1b.Data), node1b.Links, node1b.size) } diff --git a/packages/interface-ipfs-core/src/object/put.js b/packages/interface-ipfs-core/src/object/put.js index 5fbfa32c66..5d1483306b 100644 --- a/packages/interface-ipfs-core/src/object/put.js +++ b/packages/interface-ipfs-core/src/object/put.js @@ -49,7 +49,7 @@ module.exports = (common, options) => { expect(obj.Links).to.deep.equal(nodeJSON.links) }) - it('should put a JSON encoded Buffer', async () => { + it('should put a JSON encoded Uint8Array', async () => { const obj = { Data: uint8ArrayFromString(nanoid()), Links: [] @@ -69,7 +69,7 @@ 
module.exports = (common, options) => { expect(nodeJSON.data).to.eql(node.Data) }) - it('should put a Protobuf encoded Buffer', async () => { + it('should put a Protobuf encoded Uint8Array', async () => { const node = new DAGNode(uint8ArrayFromString(nanoid())) const serialized = node.serialize() @@ -79,7 +79,7 @@ module.exports = (common, options) => { expect(node2.Links).to.deep.equal(node.Links) }) - it('should put a Buffer as data', async () => { + it('should put a Uint8Array as data', async () => { const data = uint8ArrayFromString(nanoid()) const cid = await ipfs.object.put(data) diff --git a/packages/interface-ipfs-core/test/fixtures/weird name folder [v0]/add b/packages/interface-ipfs-core/test/fixtures/weird name folder [v0]/add index ce7fb7cc8c..1ad5e7c04e 100644 --- a/packages/interface-ipfs-core/test/fixtures/weird name folder [v0]/add +++ b/packages/interface-ipfs-core/test/fixtures/weird name folder [v0]/add @@ -5,7 +5,7 @@ const ipfs = require('../src')('localhost', 5001) const f1 = 'Hello' const f2 = 'World' -ipfs.add([new Buffer(f1), new Buffer(f2)], function (err, res) { +ipfs.add([Uint8Array.from(f1), Uint8Array.from(f2)], function (err, res) { if (err || !res) return console.log(err) for (let i = 0; i < res.length; i++) { diff --git a/packages/ipfs-cli/package.json b/packages/ipfs-cli/package.json index 7985f1feb8..0be4d497cd 100644 --- a/packages/ipfs-cli/package.json +++ b/packages/ipfs-cli/package.json @@ -20,7 +20,8 @@ }, "scripts": { "lint": "aegir lint", - "build": "aegir build", + "build": "npm run build:types", + "build:types": "tsc --build", "test": "npm run test:node", "test:node": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", @@ -56,8 +57,8 @@ "just-safe-set": "^2.1.0", "libp2p": "^0.29.0", "libp2p-crypto": "^0.18.0", - "libp2p-delegated-content-routing": "^0.7.0", - "libp2p-delegated-peer-routing": "^0.7.0", + "libp2p-delegated-content-routing": "^0.8.0", + "libp2p-delegated-peer-routing": "^0.8.0", "libp2p-webrtc-star": "^0.20.1", "mafmt": "^8.0.0", "multiaddr": "^8.0.0", @@ -74,13 +75,14 @@ "yargs": "^16.0.3" }, "devDependencies": { - "aegir": "^27.0.0", + "aegir": "^28.0.0", "nanoid": "^3.1.12", "ncp": "^2.0.0", "rimraf": "^3.0.2", "sinon": "^9.0.3", "string-argv": "^0.3.1", "temp-write": "^4.0.0", + "typescript": "^4.0.3", "wrtc": "^0.4.6" }, "optionalDependencies": { diff --git a/packages/ipfs-cli/src/commands/bootstrap/rm.js b/packages/ipfs-cli/src/commands/bootstrap/rm.js index ef9bab21e1..0b3d59e3e8 100644 --- a/packages/ipfs-cli/src/commands/bootstrap/rm.js +++ b/packages/ipfs-cli/src/commands/bootstrap/rm.js @@ -1,8 +1,5 @@ 'use strict' -const debug = require('debug') -const log = debug('cli:bootstrap') -log.error = debug('cli:bootstrap:error') const parseDuration = require('parse-duration').default module.exports = { diff --git a/packages/ipfs-cli/src/commands/config/profile/apply.js b/packages/ipfs-cli/src/commands/config/profile/apply.js index 82a3c9c223..38d6882a74 100644 --- a/packages/ipfs-cli/src/commands/config/profile/apply.js +++ b/packages/ipfs-cli/src/commands/config/profile/apply.js @@ -27,7 +27,7 @@ module.exports = { timeout }) const delta = JSONDiff.diff(diff.original, diff.updated) - const res = JSONDiff.formatters.console.format(delta, diff.original) + const res = delta && JSONDiff.formatters.console.format(delta, diff.original) if (res) { print(res) diff --git a/packages/ipfs-cli/src/commands/config/show.js b/packages/ipfs-cli/src/commands/config/show.js index b9cd53c667..ef79a61741 
100644 --- a/packages/ipfs-cli/src/commands/config/show.js +++ b/packages/ipfs-cli/src/commands/config/show.js @@ -1,8 +1,5 @@ 'use strict' -const debug = require('debug') -const log = debug('cli:config') -log.error = debug('cli:config:error') const parseDuration = require('parse-duration').default module.exports = { diff --git a/packages/ipfs-cli/src/commands/daemon.js b/packages/ipfs-cli/src/commands/daemon.js index 95fbcea957..1843dc3897 100644 --- a/packages/ipfs-cli/src/commands/daemon.js +++ b/packages/ipfs-cli/src/commands/daemon.js @@ -56,7 +56,7 @@ module.exports = { // read and parse config file if (argv.initConfig) { try { - const raw = fs.readFileSync(argv.initConfig) + const raw = fs.readFileSync(argv.initConfig, { encoding: 'utf8' }) config = JSON.parse(raw) } catch (error) { debug(error) @@ -83,12 +83,15 @@ module.exports = { try { await daemon.start() + // @ts-ignore - _httpApi is possibly undefined daemon._httpApi._apiServers.forEach(apiServer => { print(`API listening on ${apiServer.info.ma}`) }) + // @ts-ignore - _httpGateway is possibly undefined daemon._httpGateway._gatewayServers.forEach(gatewayServer => { print(`Gateway (read only) listening on ${gatewayServer.info.ma}`) }) + // @ts-ignore - _httpApi is possibly undefined daemon._httpApi._apiServers.forEach(apiServer => { print(`Web UI available at ${toUri(apiServer.info.ma)}/webui`) }) diff --git a/packages/ipfs-cli/src/commands/get.js b/packages/ipfs-cli/src/commands/get.js index fc19986f02..053de10326 100644 --- a/packages/ipfs-cli/src/commands/get.js +++ b/packages/ipfs-cli/src/commands/get.js @@ -3,7 +3,7 @@ const fs = require('fs') const path = require('path') const toIterable = require('stream-to-it') -const pipe = require('it-pipe') +const { pipe } = require('it-pipe') const { map } = require('streaming-iterables') const parseDuration = require('parse-duration').default diff --git a/packages/ipfs-cli/src/commands/id.js b/packages/ipfs-cli/src/commands/id.js index cd51718efe..b4868227a2 100644 --- a/packages/ipfs-cli/src/commands/id.js +++ b/packages/ipfs-cli/src/commands/id.js @@ -37,6 +37,6 @@ module.exports = { return } - print(JSON.stringify(id, '', 2)) + print(JSON.stringify(id, null, 2)) } } diff --git a/packages/ipfs-cli/src/commands/init.js b/packages/ipfs-cli/src/commands/init.js index 6cdbea8e21..2a5f9fd0c1 100644 --- a/packages/ipfs-cli/src/commands/init.js +++ b/packages/ipfs-cli/src/commands/init.js @@ -55,7 +55,7 @@ module.exports = { // read and parse config file if (argv.defaultConfig) { try { - const raw = fs.readFileSync(argv.defaultConfig) + const raw = fs.readFileSync(argv.defaultConfig, { encoding: 'utf8' }) config = JSON.parse(raw) } catch (error) { debug(error) diff --git a/packages/ipfs-cli/src/commands/key/import.js b/packages/ipfs-cli/src/commands/key/import.js index 8b81c5b4d1..7cbc6a0e21 100644 --- a/packages/ipfs-cli/src/commands/key/import.js +++ b/packages/ipfs-cli/src/commands/key/import.js @@ -19,7 +19,7 @@ module.exports = { describe: 'Input PEM file', type: 'string', demandOption: true, - coerce: ('input', input => fs.readFileSync(input, 'utf8')) + coerce: input => fs.readFileSync(input, 'utf8') }, timeout: { type: 'string', diff --git a/packages/ipfs-cli/src/commands/refs-local.js b/packages/ipfs-cli/src/commands/refs-local.js index ac86abeb0e..5b3cc2ed8f 100644 --- a/packages/ipfs-cli/src/commands/refs-local.js +++ b/packages/ipfs-cli/src/commands/refs-local.js @@ -23,7 +23,7 @@ module.exports = { } }, - async handler ({ ctx: { ipfs, print }, timeout, cidBase, multihash }) 
{ + async handler ({ ctx: { ipfs, print }, timeout, multihash }) { for await (const { ref, err } of ipfs.refs.local({ timeout })) { diff --git a/packages/ipfs-cli/src/commands/stats/bitswap.js b/packages/ipfs-cli/src/commands/stats/bitswap.js index 20ce0a41a2..a90245f09f 100644 --- a/packages/ipfs-cli/src/commands/stats/bitswap.js +++ b/packages/ipfs-cli/src/commands/stats/bitswap.js @@ -1,8 +1,9 @@ 'use strict' // This is an alias for `bitswap stat`. -const bitswapStats = require('../bitswap/stat.js') -// The command needs to be renamed, else it would be `stats stat` instead of -// `stats bitswap` -bitswapStats.command = 'bitswap' +const bitswapStats = Object.assign(require('../bitswap/stat.js'), { + // The command needs to be renamed, else it would be `stats stat` instead of + // `stats bitswap` + command: 'bitswap' +}) module.exports = bitswapStats diff --git a/packages/ipfs-cli/src/commands/swarm/addrs/local.js b/packages/ipfs-cli/src/commands/swarm/addrs/local.js index c0c402389f..1bcf1f807b 100644 --- a/packages/ipfs-cli/src/commands/swarm/addrs/local.js +++ b/packages/ipfs-cli/src/commands/swarm/addrs/local.js @@ -1,8 +1,5 @@ 'use strict' -const debug = require('debug') -const log = debug('cli:object') -log.error = debug('cli:object:error') const parseDuration = require('parse-duration').default module.exports = { diff --git a/packages/ipfs-cli/src/daemon.js b/packages/ipfs-cli/src/daemon.js index 59ef5fbb9b..30492473fe 100644 --- a/packages/ipfs-cli/src/daemon.js +++ b/packages/ipfs-cli/src/daemon.js @@ -14,14 +14,15 @@ const HttpGateway = require('ipfs-http-gateway') const createRepo = require('ipfs-core/src/runtime/repo-nodejs') class Daemon { - constructor (options) { - this._options = options || {} + constructor (options = {}) { + this._options = options if (process.env.IPFS_MONITORING) { // Setup debug metrics collection const prometheusClient = require('prom-client') const prometheusGcStats = require('prometheus-gc-stats') const collectDefaultMetrics = prometheusClient.collectDefaultMetrics + // @ts-ignore - timeout isn't in typedefs collectDefaultMetrics({ timeout: 5000 }) prometheusGcStats(prometheusClient.register)() } @@ -46,7 +47,9 @@ class Daemon { this._httpGateway = await httpGateway.start() // for the CLI to know the where abouts of the API + // @ts-ignore - _apiServers is possibly undefined if (this._httpApi._apiServers.length) { + // @ts-ignore - _apiServers is possibly undefined await repo.apiAddr.set(this._httpApi._apiServers[0].info.ma) } @@ -59,6 +62,7 @@ class Daemon { await Promise.all([ this._httpApi && this._httpApi.stop(), this._httpGateway && this._httpGateway.stop(), + // @ts-ignore - may not have stop if init was false this._ipfs && this._ipfs.stop() ]) log('stopped') @@ -71,6 +75,7 @@ function getLibp2p ({ libp2pOptions, options, config, peerId }) { let electronWebRTC let wrtc try { + // @ts-ignore - cant find type info electronWebRTC = require('electron-webrtc')() } catch (err) { log('failed to load optional electron-webrtc dependency') @@ -99,6 +104,7 @@ function getLibp2p ({ libp2pOptions, options, config, peerId }) { const delegateApiOptions = { host: delegateAddr.host, // port is a string atm, so we need to convert for the check + // @ts-ignore - parseInt(input:string) => number protocol: parseInt(delegateAddr.port) === 443 ? 
'https' : 'http', port: delegateAddr.port } diff --git a/packages/ipfs-cli/src/parser.js b/packages/ipfs-cli/src/parser.js index 4be078ad61..fe93a95c44 100644 --- a/packages/ipfs-cli/src/parser.js +++ b/packages/ipfs-cli/src/parser.js @@ -1,5 +1,6 @@ 'use strict' +// @ts-ignore const yargs = require('yargs/yargs')(process.argv.slice(2)) const utils = require('./utils') diff --git a/packages/ipfs-cli/src/utils.js b/packages/ipfs-cli/src/utils.js index 769829ec77..986f9f06a9 100644 --- a/packages/ipfs-cli/src/utils.js +++ b/packages/ipfs-cli/src/utils.js @@ -26,6 +26,12 @@ const isDaemonOn = () => { let visible = true const disablePrinting = () => { visible = false } +/** + * + * @param {string} msg + * @param {boolean} [includeNewline=true] + * @param {boolean} [isError=false] + */ const print = (msg, includeNewline = true, isError = false) => { if (visible) { if (msg === undefined) { @@ -38,18 +44,29 @@ const print = (msg, includeNewline = true, isError = false) => { } print.clearLine = () => { - return process.stdout.clearLine() + return process.stdout.clearLine(0) } print.cursorTo = (pos) => { process.stdout.cursorTo(pos) } +/** + * Write data directly to stdout + * + * @param {string|Uint8Array} data + */ print.write = (data) => { process.stdout.write(data) } -print.error = (msg, newline) => { +/** + * Print an error message + * + * @param {string} msg + * @param {boolean} [newline=true] + */ +print.error = (msg, newline = true) => { print(msg, newline, true) } @@ -92,6 +109,7 @@ async function getIpfs (argv) { start: false, pass: argv.pass }) + return { isDaemon: false, ipfs, diff --git a/packages/ipfs-cli/tsconfig.json b/packages/ipfs-cli/tsconfig.json new file mode 100644 index 0000000000..8fdab14fa1 --- /dev/null +++ b/packages/ipfs-cli/tsconfig.json @@ -0,0 +1,24 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + "package.json" + ], + "references": [ + { + "path": "../ipfs-core-utils" + }, + { + "path": "../ipfs-core" + }, + { + "path": "../ipfs-http-client" + }, + { + "path": "../ipfs-http-server" + } + ] +} diff --git a/packages/ipfs-core-utils/.aegir.js b/packages/ipfs-core-utils/.aegir.js index 5e1ac8074a..b0dc5419a4 100644 --- a/packages/ipfs-core-utils/.aegir.js +++ b/packages/ipfs-core-utils/.aegir.js @@ -48,6 +48,7 @@ const echoServer = async (port = 3000) => { let echo module.exports = { + bundlesize: { maxSize: '540B' }, hooks: { pre: async () => { echo = await echoServer() diff --git a/packages/ipfs-core-utils/package.json b/packages/ipfs-core-utils/package.json index c69180d9e1..ee55e91ae5 100644 --- a/packages/ipfs-core-utils/package.json +++ b/packages/ipfs-core-utils/package.json @@ -2,15 +2,22 @@ "name": "ipfs-core-utils", "version": "0.4.0", "description": "Package to share code between ipfs and ipfs-http-client", - "main": "src/index.js", "author": "Alex Potsides ", "homepage": "https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs-core-utils#readme", "bugs": "https://github.com/ipfs/js-ipfs/issues", "leadMaintainer": "Alex Potsides ", + "main": "src/index.js", "files": [ "src", "dist" ], + "typesVersions": { + "*": { + "*": [ + "dist/*" + ] + } + }, "repository": { "type": "git", "url": "git+https://github.com/ipfs/js-ipfs.git" @@ -22,7 +29,9 @@ "test:electron-renderer": "aegir test -t electron-renderer", "test:node": "aegir test -t node", "lint": "aegir lint", - "build": "aegir build", + "build": "npm run build:js && npm run build:types", + "build:js": "aegir build", + "build:types": "tsc 
--build", "clean": "rm -rf ./dist", "dep-check": "aegir dep-check" }, @@ -39,7 +48,8 @@ "uint8arrays": "^1.1.0" }, "devDependencies": { - "aegir": "^27.0.0", - "delay": "^4.4.0" + "aegir": "^28.0.0", + "delay": "^4.4.0", + "typescript": "^4.0.3" } } diff --git a/packages/ipfs-core-utils/src/cid.js b/packages/ipfs-core-utils/src/cid.js index 87d6754ee6..b0578d375c 100644 --- a/packages/ipfs-core-utils/src/cid.js +++ b/packages/ipfs-core-utils/src/cid.js @@ -10,23 +10,22 @@ const CID = require('cids') * that it will also not apply the encoding (since v0 CIDs can only be encoded * as base58btc). * - * @param {CID | Buffer | string} cid - The CID to encode + * @param {CID|Uint8Array|string} input - The CID to encode * @param {Object} [options] - Optional options * @param {string} [options.base] - Name of multibase codec to encode the CID with * @param {boolean} [options.upgrade] - Automatically upgrade v0 CIDs to v1 when * necessary. Default: true. - * @returns {string} + * @returns {string} - CID in string representation */ -exports.cidToString = (cid, options) => { - options = options || {} - options.upgrade = options.upgrade !== false - - if (!CID.isCID(cid)) { - cid = new CID(cid) - } +exports.cidToString = (input, options = {}) => { + const upgrade = options.upgrade !== false + let cid = CID.isCID(input) + ? input + // @ts-ignore - TS seems to get confused by the type defs in CID repo. + : new CID(input) if (cid.version === 0 && options.base && options.base !== 'base58btc') { - if (!options.upgrade) return cid.toString() + if (!upgrade) return cid.toString() cid = cid.toV1() } diff --git a/packages/ipfs-core-utils/src/files/format-mode.js b/packages/ipfs-core-utils/src/files/format-mode.js index f25ae5c0fa..452aa182b5 100644 --- a/packages/ipfs-core-utils/src/files/format-mode.js +++ b/packages/ipfs-core-utils/src/files/format-mode.js @@ -24,6 +24,12 @@ function checkPermission (mode, perm, type, output) { } } +/** + * + * @param {Mode} mode + * @param {boolean} isDirectory + * @returns {string} + */ function formatMode (mode, isDirectory) { const output = [] @@ -64,3 +70,7 @@ function formatMode (mode, isDirectory) { } module.exports = formatMode + +/** + * @typedef {number} Mode + */ diff --git a/packages/ipfs-core-utils/src/files/format-mtime.js b/packages/ipfs-core-utils/src/files/format-mtime.js index 486cecb406..13f6e9e2d3 100644 --- a/packages/ipfs-core-utils/src/files/format-mtime.js +++ b/packages/ipfs-core-utils/src/files/format-mtime.js @@ -1,5 +1,9 @@ 'use strict' +/** + * @param {MTime} mtime + * @returns {string} + */ function formatMtime (mtime) { if (mtime == null) { return '-' @@ -18,4 +22,12 @@ function formatMtime (mtime) { }) } +/** + * @typedef {object} MTime + * @property {number} secs - the number of seconds since (positive) or before + * (negative) the Unix Epoch began + * @property {number} nsecs - the number of nanoseconds since the last full + * second. 
+ */ + module.exports = formatMtime diff --git a/packages/ipfs-core-utils/src/files/normalise-input/index.browser.js b/packages/ipfs-core-utils/src/files/normalise-input/index.browser.js index c96260b528..98e4145dce 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/index.browser.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/index.browser.js @@ -3,7 +3,7 @@ const normaliseContent = require('./normalise-content.browser') const normaliseInput = require('./normalise-input') -/* +/** * Transforms any of the `ipfs.add` input types into * * ``` @@ -12,7 +12,7 @@ const normaliseInput = require('./normalise-input') * * See https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfsadddata-options * - * @param input Object - * @return AsyncInterable<{ path, mode, mtime, content: Blob }> + * @param {import('./normalise-input').Source} input + * @returns {AsyncIterable>} */ module.exports = (input) => normaliseInput(input, normaliseContent) diff --git a/packages/ipfs-core-utils/src/files/normalise-input/index.js b/packages/ipfs-core-utils/src/files/normalise-input/index.js index 00dd946bfa..d6b987807c 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/index.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/index.js @@ -3,7 +3,7 @@ const normaliseContent = require('./normalise-content') const normaliseInput = require('./normalise-input') -/* +/** * Transforms any of the `ipfs.add` input types into * * ``` @@ -12,7 +12,7 @@ const normaliseInput = require('./normalise-input') * * See https://github.com/ipfs/js-ipfs/blob/master/docs/core-api/FILES.md#ipfsadddata-options * - * @param input Object - * @return AsyncInterable<{ path, mode, mtime, content: AsyncIterable }> + * @param {import('./normalise-input').Source} input + * @returns {AsyncIterable>>} */ module.exports = (input) => normaliseInput(input, normaliseContent) diff --git a/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.browser.js b/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.browser.js index 415ec8dd86..690a70f7a3 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.browser.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.browser.js @@ -8,9 +8,14 @@ const all = require('it-all') const { isBytes, - isBlob + isBlob, + isReadableStream } = require('./utils') +/** + * @param {import('./normalise-input').ToContent} input + * @returns {Promise} + */ async function toBlob (input) { // Bytes | String if (isBytes(input) || typeof input === 'string' || input instanceof String) { @@ -23,7 +28,7 @@ async function toBlob (input) { } // Browser stream - if (typeof input.getReader === 'function') { + if (isReadableStream(input)) { input = browserStreamToIt(input) } @@ -53,6 +58,10 @@ async function toBlob (input) { throw errCode(new Error(`Unexpected input: ${input}`), 'ERR_UNEXPECTED_INPUT') } +/** + * @param {AsyncIterable|Iterable} stream + * @returns {Promise} + */ async function itToBlob (stream) { const parts = [] diff --git a/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.js b/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.js index 1e726ca49a..072460f7bf 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/normalise-content.js @@ -9,13 +9,18 @@ const all = require('it-all') const map = require('it-map') const { isBytes, + isReadableStream, isBlob } = 
require('./utils') +/** + * @param {import('./normalise-input').ToContent} input + * @returns {AsyncIterable} + */ async function * toAsyncIterable (input) { // Bytes | String if (isBytes(input) || typeof input === 'string' || input instanceof String) { - yield toBuffer(input) + yield toBytes(input) return } @@ -26,7 +31,7 @@ async function * toAsyncIterable (input) { } // Browser stream - if (typeof input.getReader === 'function') { + if (isReadableStream(input)) { input = browserStreamToIt(input) } @@ -45,13 +50,13 @@ async function * toAsyncIterable (input) { // (Async)Iterable if (Number.isInteger(value)) { - yield toBuffer(await all(peekable)) + yield Uint8Array.from((await all(peekable))) return } // (Async)Iterable if (isBytes(value) || typeof value === 'string' || value instanceof String) { - yield * map(peekable, chunk => toBuffer(chunk)) + yield * map(peekable, toBytes) return } } @@ -59,11 +64,24 @@ async function * toAsyncIterable (input) { throw errCode(new Error(`Unexpected input: ${input}`), 'ERR_UNEXPECTED_INPUT') } -function toBuffer (chunk) { - if (isBytes(chunk)) { +/** + * + * @param {ArrayBuffer | ArrayBufferView | string | InstanceType | number[]} chunk + * @returns {Uint8Array} + */ +function toBytes (chunk) { + if (chunk instanceof Uint8Array) { return chunk } + if (ArrayBuffer.isView(chunk)) { + return new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength) + } + + if (chunk instanceof ArrayBuffer) { + return new Uint8Array(chunk) + } + if (Array.isArray(chunk)) { return Uint8Array.from(chunk) } diff --git a/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js b/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js index 90bb00015f..8b3386779d 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/normalise-input.js @@ -7,9 +7,19 @@ const map = require('it-map') const { isBytes, isBlob, + isReadableStream, isFileObject } = require('./utils') +// eslint-disable-next-line complexity + +/** + * @template {Blob|AsyncIterable} Content + * @param {Source} input + * @param {(content:ToContent) => Content|Promise} normaliseContent + * @returns {AsyncIterable>} + */ +// eslint-disable-next-line complexity module.exports = async function * normaliseInput (input, normaliseContent) { // must give us something if (input === null || input === undefined) { @@ -22,7 +32,7 @@ module.exports = async function * normaliseInput (input, normaliseContent) { return } - // Buffer|ArrayBuffer|TypedArray + // Uint8Array|ArrayBuffer|TypedArray // Blob|File if (isBytes(input) || isBlob(input)) { yield toFileObject(input, normaliseContent) @@ -30,7 +40,7 @@ module.exports = async function * normaliseInput (input, normaliseContent) { } // Browser ReadableStream - if (typeof input.getReader === 'function') { + if (isReadableStream(input)) { input = browserStreamToIt(input) } @@ -66,7 +76,7 @@ module.exports = async function * normaliseInput (input, normaliseContent) { // (Async)Iterable> // ReadableStream<(Async)Iterable> // ReadableStream> - if (value[Symbol.iterator] || value[Symbol.asyncIterator] || typeof value.getReader === 'function') { + if (value[Symbol.iterator] || value[Symbol.asyncIterator] || isReadableStream(value)) { yield * map(peekable, (value) => toFileObject(value, normaliseContent)) return } @@ -83,18 +93,62 @@ module.exports = async function * normaliseInput (input, normaliseContent) { throw errCode(new Error('Unexpected input: ' + typeof input), 
'ERR_UNEXPECTED_INPUT') } +/** + * @template {Blob|AsyncIterable} Content + * @param {ToFile} input + * @param {(content:ToContent) => Content|Promise} normaliseContent + * @returns {Promise>} + */ async function toFileObject (input, normaliseContent) { - const obj = { - path: input.path || '', - mode: input.mode, - mtime: input.mtime - } - - if (input.content) { - obj.content = await normaliseContent(input.content) - } else if (!input.path) { // Not already a file object with path or content prop - obj.content = await normaliseContent(input) + // @ts-ignore - Those properties don't exist on most input types + const { path, mode, mtime, content } = input + + const file = { path: path || '', mode, mtime } + if (content) { + file.content = await normaliseContent(content) + } else if (!path) { // Not already a file object with path or content prop + // @ts-ignore - input still can be different ToContent + file.content = await normaliseContent(input) } - return obj + return file } + +/** + * @typedef {import('../format-mtime').MTime} MTime + * @typedef {import('../format-mode').Mode} Mode + * @typedef {Object} Directory + * @property {string} path + * @property {Mode} [mode] + * @property {MTime} [mtime] + * @property {undefined} [content] + * + * @typedef {Object} FileInput + * @property {string} [path] + * @property {ToContent} [content] + * @property {number | string} [mode] + * @property {UnixTime} [mtime] + * + * @typedef {Date | MTime | HRTime} UnixTime + * + * Time representation as tuple of two integers, as per the output of + * [`process.hrtime()`](https://nodejs.org/dist/latest/docs/api/process.html#process_process_hrtime_time). + * @typedef {[number, number]} HRTime + * + * @typedef {string|InstanceType|ArrayBufferView|ArrayBuffer|Blob|Iterable | AsyncIterable | ReadableStream} ToContent + * @typedef {ToContent|FileInput} ToFile + * @typedef {Iterable | AsyncIterable | ReadableStream} Source + */ +/** + * @template {AsyncIterable|Blob} Content + * @typedef {Object} File + * @property {string} path + * @property {Mode} [mode] + * @property {MTime} [mtime] + * @property {Content} [content] + */ + +/** + * @template {AsyncIterable|Blob} Content + * @typedef {File|Directory} Entry + */ diff --git a/packages/ipfs-core-utils/src/files/normalise-input/utils.js b/packages/ipfs-core-utils/src/files/normalise-input/utils.js index c09f0c8241..e1d0cb2b61 100644 --- a/packages/ipfs-core-utils/src/files/normalise-input/utils.js +++ b/packages/ipfs-core-utils/src/files/normalise-input/utils.js @@ -2,21 +2,42 @@ const { Blob } = require('ipfs-utils/src/globalthis') +/** + * @param {any} obj + * @returns {obj is ArrayBufferView|ArrayBuffer} + */ function isBytes (obj) { return ArrayBuffer.isView(obj) || obj instanceof ArrayBuffer } +/** + * @param {any} obj + * @returns {obj is Blob} + */ function isBlob (obj) { return typeof Blob !== 'undefined' && obj instanceof Blob } -// An object with a path or content property +/** + * An object with a path or content property + * + * @param {any} obj + * @returns {obj is import('./normalise-input').FileInput} + */ function isFileObject (obj) { return typeof obj === 'object' && (obj.path || obj.content) } +/** + * @param {any} value + * @returns {value is ReadableStream} + */ +const isReadableStream = (value) => + value && typeof value.getReader === 'function' + module.exports = { isBytes, isBlob, - isFileObject + isFileObject, + isReadableStream } diff --git a/packages/ipfs-core-utils/src/pins/normalise-input.js 
b/packages/ipfs-core-utils/src/pins/normalise-input.js index 9cf591932a..fc4299b7ce 100644 --- a/packages/ipfs-core-utils/src/pins/normalise-input.js +++ b/packages/ipfs-core-utils/src/pins/normalise-input.js @@ -3,10 +3,10 @@ const errCode = require('err-code') const CID = require('cids') -/* +/** * Transform one of: * - * ``` + * ```ts * CID * String * { cid: CID recursive, metadata } @@ -22,97 +22,102 @@ const CID = require('cids') * ``` * Into: * - * ``` - * AsyncIterable<{ path: CID|String, recursive, metadata }> + * ```ts + * AsyncIterable<{ path: CID|String, recursive:boolean, metadata }> * ``` * - * @param input Object - * @return AsyncIterable<{ path: CID|String, recursive, metadata }> + * @param {Source} input + * @returns {AsyncIterable} */ -module.exports = function normaliseInput (input) { +// eslint-disable-next-line complexity +module.exports = async function * normaliseInput (input) { // must give us something if (input === null || input === undefined) { - throw errCode(new Error(`Unexpected input: ${input}`, 'ERR_UNEXPECTED_INPUT')) + throw errCode(new Error(`Unexpected input: ${input}`), 'ERR_UNEXPECTED_INPUT') } // CID|String - if (CID.isCID(input) || input instanceof String || typeof input === 'string') { - return (async function * () { // eslint-disable-line require-await - yield toPin({ cid: input }) - })() + if (CID.isCID(input)) { + yield toPin({ cid: input }) + return + } + + if (input instanceof String || typeof input === 'string') { + yield toPin({ path: input }) + return } // { cid: CID recursive, metadata } + // @ts-ignore - it still could be iterable or async iterable if (input.cid != null || input.path != null) { - return (async function * () { // eslint-disable-line require-await - yield toPin(input) - })() + // @ts-ignore + return yield toPin(input) } // Iterable if (input[Symbol.iterator]) { - return (async function * () { // eslint-disable-line require-await - const iterator = input[Symbol.iterator]() - const first = iterator.next() - if (first.done) return iterator - - // Iterable - if (CID.isCID(first.value) || first.value instanceof String || typeof first.value === 'string') { - yield toPin({ cid: first.value }) - for (const cid of iterator) { - yield toPin({ cid }) - } - return + const iterator = input[Symbol.iterator]() + const first = iterator.next() + if (first.done) return iterator + + // Iterable + if (CID.isCID(first.value) || first.value instanceof String || typeof first.value === 'string') { + yield toPin({ cid: first.value }) + for (const cid of iterator) { + yield toPin({ cid }) } - - // Iterable<{ cid: CID recursive, metadata }> - if (first.value.cid != null || first.value.path != null) { - yield toPin(first.value) - for (const obj of iterator) { - yield toPin(obj) - } - return + return + } + + // Iterable<{ cid: CID recursive, metadata }> + if (first.value.cid != null || first.value.path != null) { + yield toPin(first.value) + for (const obj of iterator) { + yield toPin(obj) } + return + } - throw errCode(new Error('Unexpected input: ' + typeof input), 'ERR_UNEXPECTED_INPUT') - })() + throw errCode(new Error('Unexpected input: ' + typeof input), 'ERR_UNEXPECTED_INPUT') } // AsyncIterable if (input[Symbol.asyncIterator]) { - return (async function * () { - const iterator = input[Symbol.asyncIterator]() - const first = await iterator.next() - if (first.done) return iterator - - // AsyncIterable - if (CID.isCID(first.value) || first.value instanceof String || typeof first.value === 'string') { - yield toPin({ cid: first.value }) - for await 
(const cid of iterator) { - yield toPin({ cid }) - } - return + const iterator = input[Symbol.asyncIterator]() + const first = await iterator.next() + if (first.done) return iterator + + // AsyncIterable + if (CID.isCID(first.value) || first.value instanceof String || typeof first.value === 'string') { + yield toPin({ cid: first.value }) + for await (const cid of iterator) { + yield toPin({ cid }) } - - // AsyncIterable<{ cid: CID|String recursive, metadata }> - if (first.value.cid != null || first.value.path != null) { - yield toPin(first.value) - for await (const obj of iterator) { - yield toPin(obj) - } - return + return + } + + // AsyncIterable<{ cid: CID|String recursive, metadata }> + if (first.value.cid != null || first.value.path != null) { + yield toPin(first.value) + for await (const obj of iterator) { + yield toPin(obj) } + return + } - throw errCode(new Error('Unexpected input: ' + typeof input), 'ERR_UNEXPECTED_INPUT') - })() + throw errCode(new Error('Unexpected input: ' + typeof input), 'ERR_UNEXPECTED_INPUT') } throw errCode(new Error('Unexpected input: ' + typeof input), 'ERR_UNEXPECTED_INPUT') } +/** + * @param {ToPinWithPath|ToPinWithCID} input + * @returns {Pin} + */ function toPin (input) { const pin = { - path: input.cid || input.path, + path: input.path == null ? input.cid : `${input.path}`, recursive: input.recursive !== false } @@ -122,3 +127,25 @@ function toPin (input) { return pin } + +/** + * @typedef {Object} ToPinWithPath + * @property {string | InstanceType | CID} path + * @property {undefined} [cid] + * @property {boolean} [recursive] + * @property {any} [metadata] + * + * @typedef {Object} ToPinWithCID + * @property {undefined} [path] + * @property {CID} cid + * @property {boolean} [recursive] + * @property {any} [metadata] + * + * @typedef {CID|string|InstanceType|ToPinWithPath|ToPinWithPath} ToPin + * @typedef {ToPin|Iterable|AsyncIterable} Source + * + * @typedef {Object} Pin + * @property {string|CID} path + * @property {boolean} recursive + * @property {any} [metadata] + */ diff --git a/packages/ipfs-core-utils/src/to-cid-and-path.js b/packages/ipfs-core-utils/src/to-cid-and-path.js index 01d09791a3..9b4bdb23d3 100644 --- a/packages/ipfs-core-utils/src/to-cid-and-path.js +++ b/packages/ipfs-core-utils/src/to-cid-and-path.js @@ -5,6 +5,10 @@ const errCode = require('err-code') const IPFS_PREFIX = '/ipfs/' +/** + * @param {string|Uint8Array|CID} string + * @returns {{cid:CID, path?:string}} + */ const toCidAndPath = (string) => { if (string instanceof Uint8Array) { try { @@ -30,7 +34,7 @@ const toCidAndPath = (string) => { let path try { - cid = new CID(parts.shift()) + cid = new CID(/** @type {string} */(parts.shift())) } catch (err) { throw errCode(err, 'ERR_INVALID_CID') } diff --git a/packages/ipfs-core-utils/tsconfig.json b/packages/ipfs-core-utils/tsconfig.json new file mode 100644 index 0000000000..979a39adab --- /dev/null +++ b/packages/ipfs-core-utils/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + "package.json" + ] +} diff --git a/packages/ipfs-core/.aegir.js b/packages/ipfs-core/.aegir.js index 7ad4e2fc1d..7fca22fd09 100644 --- a/packages/ipfs-core/.aegir.js +++ b/packages/ipfs-core/.aegir.js @@ -15,7 +15,7 @@ let sigServerB let ipfsdServer module.exports = { - bundlesize: { maxSize: '530kB' }, + bundlesize: { maxSize: '517kB' }, karma: { files: [{ pattern: 'node_modules/interface-ipfs-core/test/fixtures/**/*', @@ -74,7 +74,7 @@ module.exports = { 
}, { type: 'js', ipfsModule: require(__dirname), - ipfsHttpModule: require('ipfs-http-client'), + ipfsHttpModule: require('../ipfs-http-client'), ipfsBin: path.resolve(path.join(__dirname, '..', 'ipfs', 'src', 'cli.js')), ipfsOptions: { libp2p: { diff --git a/packages/ipfs-core/package.json b/packages/ipfs-core/package.json index a6a6862712..b461c8e81f 100644 --- a/packages/ipfs-core/package.json +++ b/packages/ipfs-core/package.json @@ -9,11 +9,11 @@ "bugs": "https://github.com/ipfs/js-ipfs/issues", "license": "(Apache-2.0 OR MIT)", "leadMaintainer": "Alex Potsides ", + "main": "src/index.js", "files": [ "src", "dist" ], - "main": "src/index.js", "browser": { "./src/runtime/init-assets-nodejs.js": "./src/runtime/init-assets-browser.js", "./src/runtime/config-nodejs.js": "./src/runtime/config-browser.js", @@ -26,13 +26,22 @@ "./test/utils/create-repo-nodejs.js": "./test/utils/create-repo-browser.js", "ipfs-utils/src/files/glob-source": false }, + "typesVersions": { + "*": { + "*": [ + "dist/*" + ] + } + }, "repository": { "type": "git", "url": "git+https://github.com/ipfs/js-ipfs.git" }, "scripts": { "lint": "aegir lint", - "build": "aegir build", + "build": "npm run build:js && npm run build:types", + "build:js": "aegir build", + "build:types": "tsc --build", "test": "aegir test", "test:node": "aegir test -t node", "test:browser": "aegir test -t browser", @@ -63,7 +72,6 @@ "ipfs-bitswap": "^3.0.0", "ipfs-block-service": "^0.18.0", "ipfs-core-utils": "^0.4.0", - "ipfs-http-client": "^47.0.1", "ipfs-repo": "^6.0.3", "ipfs-unixfs": "^2.0.3", "ipfs-unixfs-exporter": "^3.0.4", @@ -117,7 +125,7 @@ "uint8arrays": "^1.1.0" }, "devDependencies": { - "aegir": "^27.0.0", + "aegir": "^28.0.0", "delay": "^4.4.0", "ipfsd-ctl": "^7.0.2", "interface-ipfs-core": "^0.140.0", @@ -127,7 +135,8 @@ "p-event": "^4.2.0", "p-map": "^4.0.0", "rimraf": "^3.0.2", - "sinon": "^9.0.3" + "sinon": "^9.0.3", + "typescript": "^4.0.3" }, "optionalDependencies": { "prom-client": "^12.0.0", diff --git a/packages/ipfs-core/src/components/add-all/index.js b/packages/ipfs-core/src/components/add-all/index.js index 7fefc99e82..c5c7502ddb 100644 --- a/packages/ipfs-core/src/components/add-all/index.js +++ b/packages/ipfs-core/src/components/add-all/index.js @@ -1,65 +1,30 @@ 'use strict' const importer = require('ipfs-unixfs-importer') -const normaliseAddInput = require('ipfs-core-utils/src/files/normalise-input') +const normaliseAddInput = require('ipfs-core-utils/src/files/normalise-input/index') const { parseChunkerString } = require('./utils') const { pipe } = require('it-pipe') const { withTimeoutOption } = require('../../utils') const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) /** - * @typedef {Uint8Array | Blob | string | Iterable | Iterable | AsyncIterable | ReadableStream} FileContent - * - * @typedef {object} FileObject - * - If no path is specified, then the item will be added to the root level and will be given a name according to it's CID. - * - If no content is passed, then the item is treated as an empty directory. - * - One of path or content must be passed. 
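To make the accepted input shapes above concrete, here is a minimal sketch that feeds each documented form (bare content, a `{ path, content, mode, mtime }` file object, and a content-less directory entry) to `addAll`. It assumes a local node created with `ipfs-core`; the paths and contents are made up for illustration.

```js
const IPFS = require('ipfs-core')

async function main () {
  const ipfs = await IPFS.create()

  const input = [
    // bare content: added at the root and named after its CID
    new TextEncoder().encode('just some bytes'),
    // file object: path plus content and optional metadata
    {
      path: '/docs/hello.txt',
      content: 'hello world',
      mode: 0o644,
      mtime: new Date()
    },
    // no content: treated as an empty directory
    { path: '/docs/empty-dir' }
  ]

  for await (const entry of ipfs.addAll(input)) {
    console.log(entry.path, entry.cid.toString())
  }

  await ipfs.stop()
}

main().catch(console.error)
```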
- * @property {string} [path] - The path you want to the file to be accessible at from the root CID _after_ it has been added - * @property {FileContent} [content] - The contents of the file - * @property {number | string} [mode] - File mode to store the entry with (see https://en.wikipedia.org/wiki/File_system_permissions#Numeric_notation) - * @property {UnixTime} [mtime] - The modification time of the entry - * - * @typedef {FileContent | FileObject} Source - * @typedef {Iterable | AsyncIterable | ReadableStream} FileStream - * - * @typedef {Date | UnixTimeObj | [number, number]} UnixTime - As an array of numbers, it must have two elements, as per the output of [`process.hrtime()`](https://nodejs.org/dist/latest/docs/api/process.html#process_process_hrtime_time). - * - * @typedef {object} UnixTimeObj - * @property {number} secs - the number of seconds since (positive) or before (negative) the Unix Epoch began - * @property {number} [nsecs] - the number of nanoseconds since the last full second. - * - * @typedef {object} UnixFSEntry - * @property {string} path - * @property {import('cids')} cid - * @property {number} mode - * @property {UnixTimeObj} mtime - * @property {number} size - */ - -/** - * @typedef {import('../add').AddOptions & _AddAllOptions} AddAllOptions - * @typedef {object} _AddAllOptions - * @property {boolean} [enableShardingExperiment] - allows to create directories with an unlimited number of entries currently size of unixfs directories is limited by the maximum block size. Note that this is an experimental feature (default: `false`) - * @property {number} [shardSplitThreshold] - Directories with more than this number of files will be created as HAMT-sharded directories (default: `1000`) - */ - -/** - * Import multiple files and data into IPFS. - * - * @template {Record} ExtraOptions - * @callback AddAll - * @param {FileStream} source - * @param {AddAllOptions & import('../../utils').AbortOptions & ExtraOptions} [options] - * @returns {AsyncIterable} + * @param {Object} config + * @param {import('..').Block} config.block + * @param {import('..').GCLock} config.gcLock + * @param {import('..').Preload} config.preload + * @param {import('..').Pin} config.pin + * @param {import('../init').ConstructorOptions} config.options */ - module.exports = ({ block, gcLock, preload, pin, options: constructorOptions }) => { const isShardingEnabled = constructorOptions.EXPERIMENTAL && constructorOptions.EXPERIMENTAL.sharding /** - * @type {AddAll<{}>} + * Import multiple files and data into IPFS. + * + * @param {FileStream} source + * @param {AddAllOptions & AbortOptions} [options] + * @returns {AsyncIterable} */ - async function * addAll (source, options) { - options = options || {} + async function * addAll (source, options = {}) { const opts = mergeOptions({ shardSplitThreshold: isShardingEnabled ? 1000 : Infinity, strategy: 'balanced' @@ -176,3 +141,41 @@ function pinFile (pin, opts) { } } } + +/** + * @typedef {object} UnixFSEntry + * @property {string} path + * @property {CID} cid + * @property {number} [mode] + * @property {MTime} [mtime] + * @property {number} size + * + * @typedef {Object} AddAllOptions + * @property {string} [chunker='size-262144'] - Chunking algorithm used to build + * ipfs DAGs. + * @property {0|1} [cidVersion=0] - The CID version to use when storing the data. 
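A small usage sketch for some of the options documented in the `AddAllOptions` typedef that begins here, assuming a node created with `ipfs-core`; the option values are illustrative, not recommendations.

```js
const IPFS = require('ipfs-core')

async function main () {
  const ipfs = await IPFS.create()

  const files = [
    { path: 'notes/a.txt', content: 'aaa' },
    { path: 'notes/b.txt', content: 'bbb' }
  ]

  for await (const entry of ipfs.addAll(files, {
    chunker: 'size-262144', // the documented default, fixed-size chunks
    cidVersion: 1, // emit CIDv1 instead of the default CIDv0
    rawLeaves: true, // leaf nodes carry raw file data, no protobuf wrapper
    wrapWithDirectory: true, // add a wrapping directory node around the input
    pin: false, // skip pinning for this import
    progress: (bytes) => console.log('imported', bytes, 'bytes')
  })) {
    console.log(entry.path, entry.cid.toString(), entry.size)
  }

  await ipfs.stop()
}

main().catch(console.error)
```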
+ * @property {boolean} [enableShardingExperiment=false] - Allows to create + * directories with an unlimited number of entries currently size of unixfs + * directories is limited by the maximum block size. **Note** that this is an + * experimental feature. + * @property {string} [hashAlg='sha2-256'] - Multihash hashing algorithm to use. + * @property {boolean} [onlyHash=false] - If true, will not add blocks to the + * blockstore. + * @property {boolean} [pin=true] - Pin this object when adding. + * @property {(bytes:number) => void} [progress] - A function that will be + * called with the byte length of chunks as a file is added to ipfs. + * @property {boolean} [rawLeaves=false] - If true, DAG leaves will contain raw + * file data and not be wrapped in a protobuf. + * @property {number} [shardSplitThreshold=1000] - Directories with more than this + * number of files will be created as HAMT-sharded directories. + * @property {boolean} [trickle=false] - If true will use the + * [trickle DAG](https://godoc.org/github.com/ipsn/go-ipfs/gxlibs/github.com/ipfs/go-unixfs/importer/trickle) + * format for DAG generation. + * @property {boolean} [wrapWithDirectory=false] - Adds a wrapping node around + * the content. + * + * @typedef {import('ipfs-core-utils/src/files/normalise-input/normalise-input').Source} FileStream + * @typedef {import('../../utils').MTime} MTime + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('..').CID} CID + */ diff --git a/packages/ipfs-core/src/components/add-all/utils.js b/packages/ipfs-core/src/components/add-all/utils.js index 15a2c01bf2..662ccb1cd2 100644 --- a/packages/ipfs-core/src/components/add-all/utils.js +++ b/packages/ipfs-core/src/components/add-all/utils.js @@ -1,16 +1,28 @@ 'use strict' /** + * @typedef {Object} FixedChunkerOptions + * @property {'fixed'} chunker + * @property {number} [maxChunkSize] + * + * @typedef {Object} RabinChunkerOptions + * @property {'rabin'} chunker + * @property {number} avgChunkSize + * @property {number} [minChunkSize] + * @property {number} [maxChunkSize] + * + * @typedef {FixedChunkerOptions|RabinChunkerOptions} ChunkerOptions + * * Parses chunker string into options used by DAGBuilder in ipfs-unixfs-engine * * - * @param {string} chunker - Chunker algorithm supported formats: + * @param {string} [chunker] - Chunker algorithm supported formats: * "size-{size}" * "rabin" * "rabin-{avg}" * "rabin-{min}-{avg}-{max}" * - * @returns {Object} Chunker options for DAGBuilder + * @returns {ChunkerOptions} Chunker options for DAGBuilder */ const parseChunkerString = (chunker) => { if (!chunker) { @@ -38,6 +50,11 @@ const parseChunkerString = (chunker) => { } /** + * @typedef {Object} RabinChunkerSettings + * @property {number} avgChunkSize + * @property {number} [minChunkSize] + * @property {number} [maxChunkSize] + * * Parses rabin chunker string * * @param {string} chunker - Chunker algorithm supported formats: @@ -45,7 +62,7 @@ const parseChunkerString = (chunker) => { * "rabin-{avg}" * "rabin-{min}-{avg}-{max}" * - * @returns {Object} rabin chunker options + * @returns {RabinChunkerSettings} rabin chunker options */ const parseRabinString = (chunker) => { const options = {} @@ -69,6 +86,12 @@ const parseRabinString = (chunker) => { return options } +/** + * + * @param {string} str + * @param {string} name + * @returns {number} + */ const parseChunkSize = (str, name) => { const size = parseInt(str) if (isNaN(size)) { diff --git a/packages/ipfs-core/src/components/add.js 
b/packages/ipfs-core/src/components/add.js index fdcdb2a60e..130885a112 100644 --- a/packages/ipfs-core/src/components/add.js +++ b/packages/ipfs-core/src/components/add.js @@ -2,10 +2,21 @@ const last = require('it-last') -/** - * @typedef {import('./add-all').Source} Source - * @typedef {import('./add-all').UnixFSEntry} UnixFSEntry - */ +module.exports = ({ addAll }) => { + /** + * Import a file or data into IPFS. + * + * @param {Source} source + * @param {AddOptions & AbortOptions} [options] + * @returns {AddResult} + */ + async function add (source, options) { // eslint-disable-line require-await + /** @type {UnixFSEntry} - Could be undefined if empty */ + const result = (await last(addAll(source, options))) + return result + } + return add +} /** * @typedef {object} AddOptions @@ -18,24 +29,12 @@ const last = require('it-last') * @property {boolean} [rawLeaves] - if true, DAG leaves will contain raw file data and not be wrapped in a protobuf (default: `false`) * @property {boolean} [trickle] - if true will use the [trickle DAG](https://godoc.org/github.com/ipsn/go-ipfs/gxlibs/github.com/ipfs/go-unixfs/importer/trickle) format for DAG generation (default: `false`) * @property {boolean} [wrapWithDirectory] - Adds a wrapping node around the content (default: `false`) - */ - -/** - * Import a file or data into IPFS. * - * @template {Record} ExtraOptions - * @callback Add - * @param {Source} source - Data to import - * @param {AddOptions & import('../utils').AbortOptions & ExtraOptions} [options] - * @returns {Promise} + * @typedef {Promise} AddResult + * + * @typedef {import('ipfs-core-utils/src/files/normalise-input/normalise-input').FileInput} Source + * + * @typedef {import('./add-all').UnixFSEntry} UnixFSEntry + * + * @typedef {import('../utils').AbortOptions} AbortOptions */ - -module.exports = ({ addAll }) => { - /** - * @type {Add<{}>} - */ - async function add (source, options) { // eslint-disable-line require-await - return last(addAll(source, options)) - } - return add -} diff --git a/packages/ipfs-core/src/components/bitswap/stat.js b/packages/ipfs-core/src/components/bitswap/stat.js index b39168090d..237a54d80a 100644 --- a/packages/ipfs-core/src/components/bitswap/stat.js +++ b/packages/ipfs-core/src/components/bitswap/stat.js @@ -5,32 +5,38 @@ const CID = require('cids') const { withTimeoutOption } = require('../../utils') /** - * @typedef {object} BitswapStats - An object that contains information about the bitswap agent - * @property {number} provideBufLen - an integer - * @property {import('cids')[]} wantlist - * @property {string[]} peers - array of peer IDs as Strings - * @property {Big} blocksReceived - * @property {Big} dataReceived - * @property {Big} blocksSent - * @property {Big} dataSent - * @property {Big} dupBlksReceived - * @property {Big} dupDataReceived + * @param {Object} config + * @param {import('..').IPFSBitSwap} config.bitswap */ - -/** - * Show diagnostic information on the bitswap agent. - * - * @template {Record} ExtraOptions - * @callback Stat - * @param {import('../../utils').AbortOptions & ExtraOptions} [options] - * @returns {Promise} - */ - module.exports = ({ bitswap }) => { /** - * @type {Stat<{}>} + * Show diagnostic information on the bitswap agent. + * Note: `bitswap.stat` and `stats.bitswap` can be used interchangeably. 
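The rewritten `add` above simply resolves to the last entry that `addAll` yields. A hedged sketch of what that means in practice, assuming an `ipfs-core` node:

```js
const IPFS = require('ipfs-core')

async function main () {
  const ipfs = await IPFS.create()
  const file = { path: 'hello.txt', content: 'hello' }

  // add() resolves to a single UnixFSEntry...
  const single = await ipfs.add(file)
  console.log(single.cid.toString(), single.size)

  // ...which is the last entry addAll() yields for the same input
  let lastEntry
  for await (const entry of ipfs.addAll(file)) {
    lastEntry = entry
  }
  console.log(lastEntry.cid.toString() === single.cid.toString()) // true

  await ipfs.stop()
}

main().catch(console.error)
```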
+ * + * @param {import('../../utils').AbortOptions} [_options] + * @returns {Promise} + * + * @example + * ```js + * const stats = await ipfs.bitswap.stat() + * console.log(stats) + * // { + * // provideBufLen: 0, + * // wantlist: [ CID('QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM') ], + * // peers: + * // [ 'QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM', + * // 'QmSoLSafTMBsPKadTEgaXctDQVcqN88CNLHXMkTNwMKPnu', + * // 'QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd' ], + * // blocksReceived: 0, + * // dataReceived: 0, + * // blocksSent: 0, + * // dataSent: 0, + * // dupBlksReceived: 0, + * // dupDataReceived: 0 + * // } + * ``` */ - async function stat (options) { // eslint-disable-line require-await, @typescript-eslint/no-unused-vars + async function stat (_options) { // eslint-disable-line require-await const snapshot = bitswap.stat().snapshot return { @@ -48,3 +54,18 @@ module.exports = ({ bitswap }) => { return withTimeoutOption(stat) } + +/** + * @typedef {object} BitswapStats - An object that contains information about the bitswap agent + * @property {number} provideBufLen - an integer + * @property {CID[]} wantlist + * @property {string[]} peers - array of peer IDs as Strings + * @property {Big} blocksReceived + * @property {Big} dataReceived + * @property {Big} blocksSent + * @property {Big} dataSent + * @property {Big} dupBlksReceived + * @property {Big} dupDataReceived + * + * @typedef {import('..').CID} CID + */ diff --git a/packages/ipfs-core/src/components/bitswap/unwant.js b/packages/ipfs-core/src/components/bitswap/unwant.js index aa2b81f96d..455696b23c 100644 --- a/packages/ipfs-core/src/components/bitswap/unwant.js +++ b/packages/ipfs-core/src/components/bitswap/unwant.js @@ -5,22 +5,28 @@ const errCode = require('err-code') const { withTimeoutOption } = require('../../utils') /** - * @typedef {import('cids')} CID + * @param {Object} config + * @param {import('..').IPFSBitSwap} config.bitswap */ - -/** - * Removes one or more CIDs from the wantlist - * - * @template {Record} ExtraOptions - * @callback Unwant - * @param {CID | CID[]} cids - The CIDs to remove from the wantlist - * @param {import('../../utils').AbortOptions & ExtraOptions} [options] - * @returns {Promise} - A promise that resolves once the request is complete - */ - module.exports = ({ bitswap }) => { /** - * @type {Unwant<{}>} + * Removes one or more CIDs from the wantlist + * + * @param {CID | CID[]} cids - The CIDs to remove from the wantlist + * @param {AbortOptions} [options] + * @returns {Promise} - A promise that resolves once the request is complete + * @example + * ```JavaScript + * let list = await ipfs.bitswap.wantlist() + * console.log(list) + * // [ CID('QmHash') ] + * + * await ipfs.bitswap.unwant(cid) + * + * list = await ipfs.bitswap.wantlist() + * console.log(list) + * // [] + * ``` */ async function unwant (cids, options) { // eslint-disable-line require-await if (!Array.isArray(cids)) { @@ -38,3 +44,8 @@ module.exports = ({ bitswap }) => { return withTimeoutOption(unwant) } + +/** + * @typedef {import('..').CID} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/bitswap/wantlist-for-peer.js b/packages/ipfs-core/src/components/bitswap/wantlist-for-peer.js index 4afcfc6377..6f79ab8c9d 100644 --- a/packages/ipfs-core/src/components/bitswap/wantlist-for-peer.js +++ b/packages/ipfs-core/src/components/bitswap/wantlist-for-peer.js @@ -4,23 +4,23 @@ const PeerId = require('peer-id') const { withTimeoutOption } = 
require('../../utils') /** - * @typedef {import('cids')} CID - * @typedef {import('peer-id')} PeerId + * @param {Object} config + * @param {import('..').IPFSBitSwap} config.bitswap */ - -/** - * Returns the wantlist for a connected peer - * - * @template {Record} ExtraOptions - * @callback WantlistForPeer - * @param {PeerId | CID | string | Buffer} peerId - A peer ID to return the wantlist for\ - * @param {import('../../utils').AbortOptions & ExtraOptions} [options] - * @returns {Promise} - An array of CIDs currently in the wantlist - */ - module.exports = ({ bitswap }) => { /** - * @type {WantlistForPeer<{}>} + * Returns the wantlist for a connected peer + * + * @param {PeerId | CID | string | Uint8Array} peerId - A peer ID to return the wantlist for\ + * @param {AbortOptions} [options] + * @returns {Promise} - An array of CIDs currently in the wantlist + * + * @example + * ```js + * const list = await ipfs.bitswap.wantlistForPeer(peerId) + * console.log(list) + * // [ CID('QmHash') ] + * ``` */ async function wantlistForPeer (peerId, options = {}) { // eslint-disable-line require-await const list = bitswap.wantlistForPeer(PeerId.createFromCID(peerId), options) @@ -30,3 +30,17 @@ module.exports = ({ bitswap }) => { return withTimeoutOption(wantlistForPeer) } + +/** + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('..').CID} CID + * @typedef {import('..').PeerId} PeerId + */ + +/** + * @template ExtraOptions + * @callback WantlistForPeer + * @param {PeerId | CID | string | Uint8Array} peerId + * @param {AbortOptions & ExtraOptions} [options] + * @returns {Promise} + */ diff --git a/packages/ipfs-core/src/components/bitswap/wantlist.js b/packages/ipfs-core/src/components/bitswap/wantlist.js index ae29dc0f5f..94fed01ebb 100644 --- a/packages/ipfs-core/src/components/bitswap/wantlist.js +++ b/packages/ipfs-core/src/components/bitswap/wantlist.js @@ -3,21 +3,21 @@ const { withTimeoutOption } = require('../../utils') /** - * @typedef {import('cids')} CID + * @param {Object} config + * @param {import('..').IPFSBitSwap} config.bitswap */ - -/** - * Returns the wantlist for your node - * - * @template {Record} ExtraOptions - * @callback WantlistFn - * @param {import('../../utils').AbortOptions & ExtraOptions} [options] - * @returns {Promise} - An array of CIDs currently in the wantlist - */ - module.exports = ({ bitswap }) => { /** - * @type {WantlistFn<{}>} + * Returns the wantlist for your node + * + * @param {AbortOptions} [options] + * @returns {Promise} - An array of CIDs currently in the wantlist. 
+ * @example + * ```js + * const list = await ipfs.bitswap.wantlist() + * console.log(list) + * // [ CID('QmHash') ] + * ``` */ async function wantlist (options = {}) { // eslint-disable-line require-await const list = bitswap.getWantlist(options) @@ -27,3 +27,8 @@ module.exports = ({ bitswap }) => { return withTimeoutOption(wantlist) } + +/** + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('..').CID} CID + */ diff --git a/packages/ipfs-core/src/components/block/get.js b/packages/ipfs-core/src/components/block/get.js index 90cf605631..4f376fb4a5 100644 --- a/packages/ipfs-core/src/components/block/get.js +++ b/packages/ipfs-core/src/components/block/get.js @@ -4,31 +4,25 @@ const { cleanCid } = require('./utils') const { withTimeoutOption } = require('../../utils') /** - * @typedef {import('cids')} CID - * @typedef {import('ipld-block')} Block + * @param {Object} config + * @param {import('..').IPFSBlockService} config.blockService + * @param {import('..').Preload} config.preload */ - -/** - * @typedef {object} PreloadOptions - * @property {boolean} [preload] - (default: `true`) - */ - -/** - * Get a raw IPFS block. - * - * @template {Record} ExtraOptions - * @callback BlockGet - * @param {CID | string | Buffer} cid - A CID that corresponds to the desired block - * @param {import('../../utils').AbortOptions & ExtraOptions} [options] - * @returns {Promise} - A Block type object, containing both the data and the hash of the block - */ - module.exports = ({ blockService, preload }) => { /** - * @type {BlockGet} + * Get a raw IPFS block. + * + * @param {CID | string | Uint8Array} cid - A CID that corresponds to the desired block + * @param {GetOptions & AbortOptions} [options] + * @returns {Promise} - A Block type object, containing both the data and the hash of the block + * + * @example + * ```js + * const block = await ipfs.block.get(cid) + * console.log(block.data) + * ``` */ - async function get (cid, options) { // eslint-disable-line require-await - options = options || {} + async function get (cid, options = {}) { // eslint-disable-line require-await cid = cleanCid(cid) if (options.preload !== false) { @@ -40,3 +34,12 @@ module.exports = ({ blockService, preload }) => { return withTimeoutOption(get) } + +/** + * @typedef {Object} GetOptions + * @property {boolean} [preload=true] + * + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('..').CID} CID + * @typedef {import('..').IPLDBlock} IPLDBlock + */ diff --git a/packages/ipfs-core/src/components/block/put.js b/packages/ipfs-core/src/components/block/put.js index f22a26232c..8303315b59 100644 --- a/packages/ipfs-core/src/components/block/put.js +++ b/packages/ipfs-core/src/components/block/put.js @@ -7,38 +7,54 @@ const isIPFS = require('is-ipfs') const { withTimeoutOption } = require('../../utils') /** - * @typedef {import('cids')} CID - * @typedef {import('ipld-block')} Block - * @typedef {0 | 1} CidVersion + * @param {Object} config + * @param {import('..').IPFSBlockService} config.blockService + * @param {import('..').Pin} config.pin + * @param {import('..').GCLock} config.gcLock + * @param {import('..').Preload} config.preload */ - -/** - * @typedef {object} BlockPutOptions - * @property {CID} [cid] - A CID to store the block under (default: `undefined`) - * @property {string} [format] - The codec to use to create the CID (default: `'dag-pb'`) - * @property {string} [mhtype] - The hashing algorithm to use to create the CID (default: `'sha2-256'`) - * @property 
{number} [mhlen] - * @property {CidVersion} [version] - The version to use to create the CID (default: `0`) - * @property {boolean} [pin] - If true, pin added blocks recursively (default: `false`) - */ - -/** - * Stores input as an IPFS block. - * - * @template {Record} ExtraOptions - * @callback BlockPut - * @param {Buffer | Block} block - The block or data to store - * @param {BlockPutOptions & import('../../utils').AbortOptions & ExtraOptions} [options] - **Note:** If you pass a `Block` instance as the block parameter, you don't need to pass options, as the block instance will carry the CID value as a property. - * @returns {Promise} - A Block type object, containing both the data and the hash of the block - */ - module.exports = ({ blockService, pin, gcLock, preload }) => { /** - * @type {BlockPut} + * Stores input as an IPFS block. + * + * **Note:** If you pass a `Block` instance as the block parameter, you + * don't need to pass options, as the block instance will carry the CID + * value as a property. + * + * @param {Uint8Array | IPLDBlock} block - The block or data to store + * @param {PutOptions & AbortOptions} [options] - **Note:** If you pass a `Block` instance as the block parameter, you don't need to pass options, as the block instance will carry the CID value as a property. + * @returns {Promise} - A Block type object, containing both the data and the hash of the block + * @example + * ```js + * // Defaults + * const encoder = new TextEncoder() + * const decoder = new TextDecoder() + * + * const bytes = encoder.encode('a serialized object') + * const block = await ipfs.block.put(bytes) + * + * console.log(decoder.decode(block.data)) + * // Logs: + * // a serialized object + * console.log(block.cid.toString()) + * // Logs: + * // the CID of the object + * + * // With custom format and hashtype through CID + * const CID = require('cids') + * const another = encoder.encode('another serialized object') + * const cid = new CID(1, 'dag-pb', multihash) + * const block = await ipfs.block.put(another, cid) + * console.log(decoder.decode(block.data)) + * + * // Logs: + * // a serialized object + * console.log(block.cid.toString()) + * // Logs: + * // the CID of the object + * ``` */ - async function put (block, options) { - options = options || {} - + async function put (block, options = {}) { if (Array.isArray(block)) { throw new Error('Array is not supported') } @@ -50,13 +66,15 @@ module.exports = ({ blockService, pin, gcLock, preload }) => { const mhtype = options.mhtype || 'sha2-256' const format = options.format || 'dag-pb' - /** @type {CidVersion} */ - let cidVersion + /** @type {CIDVersion} */ + let cidVersion = 1 if (options.version == null) { // Pick appropriate CID version cidVersion = mhtype === 'sha2-256' && format === 'dag-pb' ? 
0 : 1 } else { + // @ts-ignore - options.version is a {number} but the CID constructor arg version is a {0|1} + // TODO: https://github.com/multiformats/js-cid/pull/129 cidVersion = options.version } @@ -93,3 +111,19 @@ module.exports = ({ blockService, pin, gcLock, preload }) => { return withTimeoutOption(put) } + +/** + * @typedef {Object} PutOptions + * @property {CID} [cid] - A CID to store the block under (default: `undefined`) + * @property {string} [format='dag-pb'] - The codec to use to create the CID (default: `'dag-pb'`) + * @property {string} [mhtype='sha2-256'] - The hashing algorithm to use to create the CID (default: `'sha2-256'`) + * @property {number} [mhlen] + * @property {CIDVersion} [version=0] - The version to use to create the CID (default: `0`) + * @property {boolean} [pin=false] - If true, pin added blocks recursively (default: `false`) + * @property {boolean} [preload] + * + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('..').CID} CID + * @typedef {import('..').IPLDBlock} IPLDBlock + * @typedef {0|1} CIDVersion + */ diff --git a/packages/ipfs-core/src/components/block/rm.js b/packages/ipfs-core/src/components/block/rm.js index 1f7e1e6315..db7414e31d 100644 --- a/packages/ipfs-core/src/components/block/rm.js +++ b/packages/ipfs-core/src/components/block/rm.js @@ -10,10 +10,33 @@ const { withTimeoutOption } = require('../../utils') const BLOCK_RM_CONCURRENCY = 8 +/** + * @param {Object} config + * @param {import('..').IPFSBlockService} config.blockService + * @param {import('../pin/pin-manager')} config.pinManager + * @param {import('..').GCLock} config.gcLock + */ module.exports = ({ blockService, gcLock, pinManager }) => { - return withTimeoutOption(async function * rm (cids, options) { - options = options || {} - + /** + * Remove one or more IPFS block(s). + * + * @param {CID[]|CID} cids - CID(s) corresponding to the block(s) to be removed. + * @param {RmOptions & AbortOptions} [options] + * @returns {AsyncIterable} + * + * @example + * ```js + * for await (const result of ipfs.block.rm(cid)) { + * if (result.error) { + * console.error(`Failed to remove block ${result.cid} due to ${result.error.message}`) + * } else { + * console.log(`Removed block ${result.cid}`) + * } + * } + * ``` + */ + async function * rm (cids, options = {}) { if (!Array.isArray(cids)) { cids = [cids] } @@ -63,5 +86,28 @@ module.exports = ({ blockService, gcLock, pinManager }) => { } finally { release() } - }) + } + + return withTimeoutOption(rm) } + +/** + * @typedef {Object} RmOptions + * @property {boolean} [force=false] - Ignores nonexistent blocks + * @property {boolean} [quiet=false] - Write minimal output + * + * @typedef {import('../../utils').AbortOptions} AbortOptions + * + * @typedef {RmSuccess|RmFailure} RmResult + * Note: If an error is present for a given object, the block with + * that cid was not removed and the error will contain the reason why, + * for example if the block was pinned.
+ * + * @typedef {Object} RmSuccess + * @property {CID} cid + * @property {void} [error] + * + * @typedef {Object} RmFailure + * @property {CID} cid + * @property {Error} error + */ diff --git a/packages/ipfs-core/src/components/block/stat.js b/packages/ipfs-core/src/components/block/stat.js index 488d11c00d..62f6c7ea3b 100644 --- a/packages/ipfs-core/src/components/block/stat.js +++ b/packages/ipfs-core/src/components/block/stat.js @@ -3,9 +3,30 @@ const { cleanCid } = require('./utils') const { withTimeoutOption } = require('../../utils') +/** + * @param {Object} config + * @param {import('..').IPFSBlockService} config.blockService + * @param {import('..').Preload} config.preload + */ module.exports = ({ blockService, preload }) => { - return withTimeoutOption(async function stat (cid, options) { - options = options || {} + /** + * Print information of a raw IPFS block. + * + * @param {CID} cid - CID of the block to get stats for. + * @param {StatOptions & AbortOptions} options + * @returns {Promise} + * @example + * ```js + * const cid = CID.from('QmQULBtTjNcMwMr4VMNknnVv3RpytrLSdgpvMcTnfNhrBJ') + * const stats = await ipfs.block.stat(cid) + * console.log(stats.cid.toString()) + * // Logs: QmQULBtTjNcMwMr4VMNknnVv3RpytrLSdgpvMcTnfNhrBJ + * console.log(stats.size) + * // Logs: 3739 + * ``` + */ + async function stat (cid, options = {}) { cid = cleanCid(cid) if (options.preload !== false) { @@ -15,5 +36,21 @@ module.exports = ({ blockService, preload }) => { const block = await blockService.get(cid) return { cid, size: block.data.length } - }) + } + + return withTimeoutOption(stat) } + +/** + * @typedef {Object} Stat + * An object containing the block's info + * @property {CID} cid + * @property {number} size + * + * @typedef {Object} StatOptions + * @property {boolean} [preload] + * + * @typedef {import('../../utils').AbortOptions} AbortOptions + * + * @typedef {import('..').CID} CID + */ diff --git a/packages/ipfs-core/src/components/block/utils.js b/packages/ipfs-core/src/components/block/utils.js index 76ca4fa293..d384db759f 100644 --- a/packages/ipfs-core/src/components/block/utils.js +++ b/packages/ipfs-core/src/components/block/utils.js @@ -3,6 +3,10 @@ const CID = require('cids') const errCode = require('err-code') +/** + * @param {string|Uint8Array|CID} cid + * @returns {CID} + */ exports.cleanCid = cid => { if (CID.isCID(cid)) { return cid } @@ -10,6 +14,7 @@ exports.cleanCid = cid => { // CID constructor knows how to do the cleaning :) try { + // @ts-ignore - string|Uint8Array union seems to confuse CID typedefs.
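The `block.put` change above keeps the rule that `dag-pb` plus `sha2-256` produces a CIDv0 and any other combination defaults to CIDv1. A minimal sketch of that behaviour, assuming an `ipfs-core` node and illustrative data:

```js
const IPFS = require('ipfs-core')

async function main () {
  const ipfs = await IPFS.create()
  const data = new TextEncoder().encode('some block data')

  // defaults: format 'dag-pb' + mhtype 'sha2-256' -> CIDv0
  const defaulted = await ipfs.block.put(data)
  console.log(defaulted.cid.version) // 0

  // any other combination defaults to CIDv1
  const raw = await ipfs.block.put(data, { format: 'raw', mhtype: 'sha2-256' })
  console.log(raw.cid.version) // 1

  const stats = await ipfs.block.stat(defaulted.cid)
  console.log(stats.size) // byte length of the stored block

  await ipfs.stop()
}

main().catch(console.error)
```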
return new CID(cid) } catch (err) { throw errCode(err, 'ERR_INVALID_CID') diff --git a/packages/ipfs-core/src/components/bootstrap/add.js b/packages/ipfs-core/src/components/bootstrap/add.js index e3fc4b914d..a9f22c43a1 100644 --- a/packages/ipfs-core/src/components/bootstrap/add.js +++ b/packages/ipfs-core/src/components/bootstrap/add.js @@ -3,16 +3,35 @@ const { isValidMultiaddr } = require('./utils') const { withTimeoutOption } = require('../../utils') +/** + * @param {import('..').IPFSRepo} repo + */ module.exports = ({ repo }) => { - return withTimeoutOption(async function add (multiaddr, options = {}) { + /** + * Add a peer address to the bootstrap list + * + * @param {Multiaddr} multiaddr - The address of a network peer + * @param {AbortOptions} [options] + * @returns {Promise} + * @example + * ```js + * const validIp4 = '/ip4/104....9z' + * + * const res = await ipfs.bootstrap.add(validIp4) + * console.log(res.Peers) + * // Logs: + * // ['/ip4/104....9z'] + * ``` + */ + async function add (multiaddr, options = {}) { if (!isValidMultiaddr(multiaddr)) { throw new Error(`${multiaddr} is not a valid Multiaddr`) } const config = await repo.config.getAll(options) - if (config.Bootstrap.indexOf(multiaddr) === -1) { - config.Bootstrap.push(multiaddr) + if (config.Bootstrap.indexOf(multiaddr.toString()) === -1) { + config.Bootstrap.push(multiaddr.toString()) } await repo.config.set(config) @@ -20,5 +39,14 @@ module.exports = ({ repo }) => { return { Peers: [multiaddr] } - }) + } + + return withTimeoutOption(add) } + +/** + * @typedef {import('./utils').Peers} Peers + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('..').CID} CID + * @typedef {import('..').Multiaddr} Multiaddr + */ diff --git a/packages/ipfs-core/src/components/bootstrap/clear.js b/packages/ipfs-core/src/components/bootstrap/clear.js index 100e01964a..2637e4d345 100644 --- a/packages/ipfs-core/src/components/bootstrap/clear.js +++ b/packages/ipfs-core/src/components/bootstrap/clear.js @@ -1,15 +1,42 @@ 'use strict' const { withTimeoutOption } = require('../../utils') +const Multiaddr = require('multiaddr') +/** + * @param {Object} config + * @param {import('..').IPFSRepo} config.repo + */ module.exports = ({ repo }) => { - return withTimeoutOption(async function clear (options = {}) { + /** + * Remove all peer addresses from the bootstrap list + * + * @param {AbortOptions} options + * @returns {Promise} + * @example + * ```js + * const res = await ipfs.bootstrap.clear() + * console.log(res.Peers) + * // Logs: + * // [address1, address2, ...] 
+ * ``` + */ + async function clear (options = {}) { const config = await repo.config.getAll(options) const removed = config.Bootstrap || [] config.Bootstrap = [] await repo.config.set(config) - return { Peers: removed } - }) + return { Peers: removed.map(ma => new Multiaddr(ma)) } + } + + return withTimeoutOption(clear) } + +/** + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('./utils').Peers} Peers + * @typedef {import('..').CID} CID + * @typedef {import('..').Multiaddr} Multiaddr + */ diff --git a/packages/ipfs-core/src/components/bootstrap/list.js b/packages/ipfs-core/src/components/bootstrap/list.js index 1023bd4b14..421f16792f 100644 --- a/packages/ipfs-core/src/components/bootstrap/list.js +++ b/packages/ipfs-core/src/components/bootstrap/list.js @@ -1,10 +1,36 @@ 'use strict' const { withTimeoutOption } = require('../../utils') +const Multiaddr = require('multiaddr') +/** + * @param {import('..').IPFSRepo} repo + */ module.exports = ({ repo }) => { - return withTimeoutOption(async function list (options) { + /** + * List all peer addresses in the bootstrap list + * + * @param {AbortOptions} [options] + * @returns {Promise} + * @example + * ```js + * const res = await ipfs.bootstrap.list() + * console.log(res.Peers) + * // Logs: + * // [address1, address2, ...] + * ``` + */ + async function list (options) { const peers = await repo.config.get('Bootstrap', options) - return { Peers: peers || [] } - }) + return { Peers: (peers || []).map(ma => new Multiaddr(ma)) } + } + + return withTimeoutOption(list) } + +/** + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('./utils').Peers} Peers + * @typedef {import('..').CID} CID + * @typedef {import('..').Multiaddr} Multiaddr + */ diff --git a/packages/ipfs-core/src/components/bootstrap/reset.js b/packages/ipfs-core/src/components/bootstrap/reset.js index 23d4134965..81a2dfc6bf 100644 --- a/packages/ipfs-core/src/components/bootstrap/reset.js +++ b/packages/ipfs-core/src/components/bootstrap/reset.js @@ -2,16 +2,42 @@ const defaultConfig = require('../../runtime/config-nodejs.js') const { withTimeoutOption } = require('../../utils') +const Multiaddr = require('multiaddr') +/** + * @param {import('..').IPFSRepo} repo + */ module.exports = ({ repo }) => { - return withTimeoutOption(async function reset (options = {}) { + /** + * List all peer addresses in the bootstrap list + * + * @param {AbortOptions} options + * @returns {Promise} + * @example + * ```js + * const res = await ipfs.bootstrap.list() + * console.log(res.Peers) + * // Logs: + * // [address1, address2, ...] 
+ * ``` + */ + async function reset (options = {}) { const config = await repo.config.getAll(options) config.Bootstrap = defaultConfig().Bootstrap await repo.config.set(config) return { - Peers: defaultConfig().Bootstrap + Peers: defaultConfig().Bootstrap.map(ma => new Multiaddr(ma)) } - }) + } + + return withTimeoutOption(reset) } + +/** + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('./utils').Peers} Peers + * @typedef {import('..').CID} CID + * @typedef {import('..').Multiaddr} Multiaddr + */ diff --git a/packages/ipfs-core/src/components/bootstrap/rm.js b/packages/ipfs-core/src/components/bootstrap/rm.js index 63a135858c..88300ea939 100644 --- a/packages/ipfs-core/src/components/bootstrap/rm.js +++ b/packages/ipfs-core/src/components/bootstrap/rm.js @@ -3,17 +3,43 @@ const { isValidMultiaddr } = require('./utils') const { withTimeoutOption } = require('../../utils') +/** + * @param {import('..').IPFSRepo} repo + */ module.exports = ({ repo }) => { - return withTimeoutOption(async function rm (multiaddr, options = {}) { + /** + * Remove a peer address from the bootstrap list + * + * @param {Multiaddr} multiaddr - The address of a network peer + * @param {AbortOptions} options + * @returns {Promise} + * @example + * ```js + * const res = await ipfs.bootstrap.list() + * console.log(res.Peers) + * // Logs: + * // [address1, address2, ...] + * ``` + */ + async function rm (multiaddr, options = {}) { if (!isValidMultiaddr(multiaddr)) { throw new Error(`${multiaddr} is not a valid Multiaddr`) } const config = await repo.config.getAll(options) - config.Bootstrap = (config.Bootstrap || []).filter(ma => ma !== multiaddr) + config.Bootstrap = (config.Bootstrap || []).filter(ma => ma.toString() !== multiaddr.toString()) await repo.config.set(config) return { Peers: [multiaddr] } - }) + } + + return withTimeoutOption(rm) } + +/** + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('./utils').Peers} Peers + * @typedef {import('..').CID} CID + * @typedef {import('..').Multiaddr} Multiaddr + */ diff --git a/packages/ipfs-core/src/components/bootstrap/utils.js b/packages/ipfs-core/src/components/bootstrap/utils.js index 4e525ce021..48dd2c61d7 100644 --- a/packages/ipfs-core/src/components/bootstrap/utils.js +++ b/packages/ipfs-core/src/components/bootstrap/utils.js @@ -2,6 +2,10 @@ const isMultiaddr = require('mafmt').IPFS.matches +/** + * @param {any} ma + * @returns {boolean} + */ exports.isValidMultiaddr = ma => { try { return isMultiaddr(ma) @@ -9,3 +13,11 @@ exports.isValidMultiaddr = ma => { return false } } + +/** + * @typedef {Object} Peers + * An object that contains an array with all the added addresses + * @property {Array} Peers + * + * @typedef {import('..').Multiaddr} Multiaddr + */ diff --git a/packages/ipfs-core/src/components/cat.js b/packages/ipfs-core/src/components/cat.js index d454ece2d0..a2a793a6f0 100644 --- a/packages/ipfs-core/src/components/cat.js +++ b/packages/ipfs-core/src/components/cat.js @@ -3,10 +3,20 @@ const exporter = require('ipfs-unixfs-exporter') const { normalizeCidPath, withTimeoutOption } = require('../utils') +/** + * @param {Object} config + * @param {import('.').IPLD} config.ipld + * @param {import('.').Preload} config.preload + */ module.exports = function ({ ipld, preload }) { - return withTimeoutOption(async function * cat (ipfsPath, options) { - options = options || {} - + /** + * Returns content of the file addressed by a valid IPFS Path or CID. 
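A short sketch of the rewritten `cat` above, slicing a file with its `offset` and `length` options; it assumes an `ipfs-core` node, and the sample string and byte offsets are made up:

```js
const IPFS = require('ipfs-core')

async function main () {
  const ipfs = await IPFS.create()

  const { cid } = await ipfs.add('hello, cat example')

  const chunks = []
  // skip the first 7 bytes and read the next 3 ("cat")
  for await (const chunk of ipfs.cat(cid, { offset: 7, length: 3 })) {
    chunks.push(chunk)
  }
  console.log(Buffer.concat(chunks).toString()) // cat

  await ipfs.stop()
}

main().catch(console.error)
```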
+ * + * @param {CID|string} ipfsPath - An IPFS path or CID to export + * @param {Options} [options] + * @returns {AsyncIterable} + */ + async function * cat (ipfsPath, options = {}) { ipfsPath = normalizeCidPath(ipfsPath) if (options.preload !== false) { @@ -26,5 +36,20 @@ module.exports = function ({ ipld, preload }) { } yield * file.content(options) - }) + } + + return withTimeoutOption(cat) } + +/** + * @typedef {CatOptions & AbortOptions} Options + * + * @typedef {Object} CatOptions + * @property {number} [offset] - An offset to start reading the file from + * @property {number} [length] - An optional max length to read from the file + * @property {boolean} [preload] + * + * @typedef {import('../utils').AbortOptions} AbortOptions + * + * @typedef {import('.').CID} CID + */ diff --git a/packages/ipfs-core/src/components/config.js b/packages/ipfs-core/src/components/config.js index 0e4fade029..b3df1c6f03 100644 --- a/packages/ipfs-core/src/components/config.js +++ b/packages/ipfs-core/src/components/config.js @@ -4,6 +4,11 @@ const getDefaultConfig = require('../runtime/config-nodejs.js') const { withTimeoutOption } = require('../utils') const log = require('debug')('ipfs:core:config') +/** + * @param {Object} config + * @param {import('.').IPFSRepo} config.repo + * @returns {Config} + */ module.exports = ({ repo }) => { return { getAll: withTimeoutOption(repo.config.getAll), @@ -22,6 +27,11 @@ module.exports = ({ repo }) => { } } + /** + * @param {string} profileName + * @param {*} options + * @returns {Promise<{original: IPFSConfig, updated: IPFSConfig}>} + */ async function applyProfile (profileName, options = {}) { const { dryRun } = options @@ -53,7 +63,11 @@ module.exports = ({ repo }) => { } } -async function listProfiles (options) { // eslint-disable-line require-await +/** + * @param {any} _options + * @returns {Promise<{name:string, description:string}[]>} + */ +async function listProfiles (_options) { // eslint-disable-line require-await return Object.keys(profiles).map(name => ({ name, description: profiles[name].description @@ -63,6 +77,10 @@ async function listProfiles (options) { // eslint-disable-line require-await const profiles = { server: { description: 'Recommended for nodes with public IPv4 address (servers, VPSes, etc.), disables host and content discovery in local networks.', + /** + * @param {IPFSConfig} config + * @returns {IPFSConfig} + */ transform: (config) => { config.Discovery.MDNS.Enabled = false config.Discovery.webRTCStar.Enabled = false @@ -72,6 +90,10 @@ const profiles = { }, 'local-discovery': { description: 'Sets default values to fields affected by `server` profile, enables discovery in local networks.', + /** + * @param {IPFSConfig} config + * @returns {IPFSConfig} + */ transform: (config) => { config.Discovery.MDNS.Enabled = true config.Discovery.webRTCStar.Enabled = true @@ -81,6 +103,10 @@ const profiles = { }, test: { description: 'Reduces external interference, useful for running ipfs in test environments. Note that with these settings node won\'t be able to talk to the rest of the network without manual bootstrap.', + /** + * @param {IPFSConfig} config + * @returns {IPFSConfig} + */ transform: (config) => { const defaultConfig = getDefaultConfig() @@ -97,6 +123,10 @@ const profiles = { }, 'default-networking': { description: 'Restores default network settings. 
Inverse profile of the `test` profile.', + /** + * @param {IPFSConfig} config + * @returns {IPFSConfig} + */ transform: (config) => { const defaultConfig = getDefaultConfig() @@ -113,17 +143,28 @@ const profiles = { }, lowpower: { description: 'Reduces daemon overhead on the system. May affect node functionality,performance of content discovery and data fetching may be degraded. Recommended for low power systems.', + /** + * @param {IPFSConfig} config + * @returns {IPFSConfig} + */ transform: (config) => { - config.Swarm = config.Swarm || {} - config.Swarm.ConnMgr = config.Swarm.ConnMgr || {} - config.Swarm.ConnMgr.LowWater = 20 - config.Swarm.ConnMgr.HighWater = 40 + const Swarm = config.Swarm || {} + const ConnMgr = Swarm.ConnMgr || {} + ConnMgr.LowWater = 20 + ConnMgr.HighWater = 40 + + Swarm.ConnMgr = ConnMgr + config.Swarm = Swarm return config } }, 'default-power': { description: 'Inverse of "lowpower" profile.', + /** + * @param {IPFSConfig} config + * @returns {IPFSConfig} + */ transform: (config) => { const defaultConfig = getDefaultConfig() @@ -136,3 +177,298 @@ const profiles = { } module.exports.profiles = profiles + +/** + * @typedef {Object} Config + * @property {Get} get + * @property {GetAll} getAll + * @property {Set} set + * @property {Replace} replace + * @property {Profiles} profiles + * + * @callback Get + * Returns the currently being used config. If the daemon is off, it returns + * the stored config. + * + * @param {string} [key] - The key of the value that should be fetched from the + * config file. If no key is passed, then the whole config will be returned. + * @param {AbortOptions} [options] + * @returns {Promise} - An object containing the configuration of the IPFS node + * @example + * const config = await ipfs.config.get('Addresses.Swarm') + * console.log(config) + * + * + * @callback GetAll + * Returns the full config been used. If the daemon is off, it returns the + * stored config. + * + * @param {AbortOptions} [options] + * @returns {Promise} + * @example + * const config = await ipfs.config.getAll() + * console.log(config) + * + * @callback Set + * Adds or replaces a config value. Note that this operation will not spark the + * restart of any service, i.e: if a config.replace changes the multiaddrs of + * the Swarm, Swarm will have to be restarted manually for the changes to take + * an effect. + * + * @param {string} key - The key of the value that should be added or replaced. + * @param {JSON} value - The value to be set. + * @param {AbortOptions} [options] + * @returns {Promise} - Promise succeeds if config change succeeded, + * otherwise fails with error. + * @example + * // Disable MDNS Discovery + * await ipfs.config.set('Discovery.MDNS.Enabled', false) + * + * @callback Replace + * Adds or replaces a config file. + * + * Note that this operation will not spark the restart of any service, + * i.e: if a config.replace changes the multiaddrs of the Swarm, Swarm will + * have to be restarted manually for the changes to take an effect. + * + * @param {Partial} value - A new configuration. 
+ * @param {AbortOptions} [options] + * @returns {Promise} + * @example + * const newConfig = { + * Bootstrap: [] + * } + * await ipfs.config.replace(newConfig) + * + * @typedef {Object} Profiles + * @property {ListProfiles} list + * @property {ApplyProfile} apply + * + * @callback ListProfiles + * List available config profiles + * @param {AbortOptions} [options] + * @returns {Promise} - An array with all the available config profiles + * @example + * const profiles = await ipfs.config.profiles.list() + * profiles.forEach(profile => { + * console.info(profile.name, profile.description) + * }) + * + * @typedef {Object} Profile + * @property {string} description + * @property {string} name + * + * + * @callback ApplyProfile + * List available config profiles + * @param {string} name + * @param {ApplyOptions} [options] + * @returns {Promise<{original: IPFSConfig, updated: IPFSConfig}>} + * + * @typedef {Object} ApplyOptionsExt + * @property {boolean} [dryRun=false] - If true does not apply the profile + * @typedef {AbortOptions & ApplyOptionsExt} ApplyOptions + * + * + * @typedef {import('../utils').AbortOptions} AbortOptions + * + * @typedef {Object} IPFSConfig + * @property {AddressConfig} Addresses + * @property {string} [Profiles] + * @property {string[]} [Bootstrap] + * @property {DiscoveryConfig} Discovery + * @property {DatastoreConfig} [Datastore] + * @property {IdentityConfig} [Identity] + * @property {KeychainConfig} [Keychain] + * @property {PubsubConfig} [Pubsub] + * @property {SwarmConfig} [Swarm] + * + * @typedef {Object} AddressConfig + * Contains information about various listener addresses to be used by this node. + * @property {APIAddress} [API='/ip4/127.0.0.1/tcp/5002'] + * @property {DelegateAddress} [Delegates=[]] + * @property {GatewayAddress} [Gateway='/ip4/127.0.0.1/tcp/9090'] + * @property {SwarmAddress} [Swarm=['/ip4/0.0.0.0/tcp/4002', '/ip4/127.0.0.1/tcp/4003/ws']] + * * + * @typedef {string} Multiaddr + * Composable and future-proof network address following [Multiaddr][] + * specification. + * + * [Multiaddr]:https://github.com/multiformats/multiaddr/ + * + * @typedef {Multiaddr|Multiaddr[]} APIAddress + * The IPFS daemon exposes an [HTTP API][] that allows to control the node and + * run the same commands as you can do from the command line. It is defined on + * the [HTTP API][] Spec. + * + * [Multiaddr][] or array of [Multiaddr][] describing the address(es) to serve the + * [HTTP API][] on. + * + * [Multiaddr]:https://github.com/multiformats/multiaddr/ + * [HTTP API]:https://docs.ipfs.io/reference/api/http + * + * @typedef {Multiaddr[]} DelegateAddress + * Delegate peers are used to find peers and retrieve content from the network + * on your behalf. + * + * Array of [Multiaddr][] describing which addresses to use as delegate nodes. + * + * [Multiaddr]:https://github.com/multiformats/multiaddr/ + * + * @typedef {Multiaddr|Multiaddr[]} GatewayAddress + * A gateway is exposed by the IPFS daemon, which allows an easy way to access + * content from IPFS, using an IPFS path. + * + * [Multiaddr][] or array of [Multiaddr][] describing the address(es) to serve + * the gateway on. + * + * [Multiaddr]:https://github.com/multiformats/multiaddr/ + * + * @typedef {Multiaddr[]} SwarmAddress + * Array of [Multiaddr][] describing which addresses to listen on for p2p swarm + * connections. 
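A sketch of the `Profiles` API documented above, using `dryRun` to preview the `lowpower` transform without persisting it; assumes an `ipfs-core` node:

```js
const IPFS = require('ipfs-core')

async function main () {
  const ipfs = await IPFS.create()

  const profiles = await ipfs.config.profiles.list()
  profiles.forEach(profile => console.log(profile.name, '-', profile.description))

  // dryRun reports what would change without persisting anything
  const { original, updated } = await ipfs.config.profiles.apply('lowpower', { dryRun: true })
  console.log('before:', original.Swarm && original.Swarm.ConnMgr)
  console.log('after: ', updated.Swarm && updated.Swarm.ConnMgr)

  await ipfs.stop()
}

main().catch(console.error)
```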
+ * + * [Multiaddr]:https://github.com/multiformats/multiaddr/ + * + * + * @typedef {Multiaddr[]} BootstrapConfig + * Bootstrap is an array of [Multiaddr][] of trusted nodes to connect to in order + * to initiate a connection to the network. + * + * [Multiaddr]:https://github.com/multiformats/multiaddr/ + * + * @typedef {Object} DatastoreConfig + * Contains information related to the construction and operation of the on-disk + * storage system. + * @property {DatastoreSpec} [Spec] + * + * @typedef {Object} DatastoreSpec + * Spec defines the structure of the IPFS datastore. It is a composable + * structure, where each datastore is represented by a JSON object. Datastores + * can wrap other datastores to provide extra functionality (e.g. metrics, + * logging, or caching). + * + * This can be changed manually, however, if you make any changes that require + * a different on-disk structure, you will need to run the [ipfs-ds-convert][] + * tool to migrate data into the new structures. + * + * [ipfs-ds-convert]:https://github.com/ipfs/ipfs-ds-convert + * + * Default: + * ```json + * { + * "mounts": [ + * { + * "child": { + * "path": "blocks", + * "shardFunc": "/repo/flatfs/shard/v1/next-to-last/2", + * "sync": true, + * "type": "flatfs" + * }, + * "mountpoint": "/blocks", + * "prefix": "flatfs.datastore", + * "type": "measure" + * }, + * { + * "child": { + * "compression": "none", + * "path": "datastore", + * "type": "levelds" + * }, + * "mountpoint": "/", + * "prefix": "leveldb.datastore", + * "type": "measure" + * } + * ], + * "type": "mount" + * } + * ``` + * + * @typedef {Object} DiscoveryConfig + * Contains options for configuring IPFS node discovery mechanisms. + * @property {MDNSDiscovery} MDNS + * @property {WebRTCStarDiscovery} webRTCStar + * + * @typedef {Object} MDNSDiscovery + * Multicast DNS is a discovery protocol that is able to find other peers on the local network. + * @property {boolean} [Enabled=true] - A boolean value for whether or not MDNS + * should be active. + * @property {number} [Interval=10] - A number of seconds to wait between + * discovery checks. + * + * @typedef {Object} WebRTCStarDiscovery + * WebRTCStar is a discovery mechanism prvided by a signalling-star that allows + * peer-to-peer communications in the browser. + * @property {boolean} [Enabled=true] - A boolean value for whether or not + * webRTCStar should be active. + * + * @typedef {Object} IdentityConfig + * @property {PeerID} [PeerID] + * @property {PrivateKey} [PrivKey] + * + * @typedef {string} PeerID + * The unique PKI identity label for this configs peer. Set on init and never + * read, its merely here for convenience. IPFS will always generate the peerID + * from its keypair at runtime. + * + * @typedef {string} PrivateKey + * The base64 encoded protobuf describing (and containing) the nodes private key. + * + * @typedef {Object} KeychainConfig + * We can customize the key management and criptographically protected messages + * by changing the Keychain options. Those options are used for generating the + * derived encryption key (DEK). + * + * The DEK object, along with the passPhrase, is the input to a PBKDF2 function. + * + * You can check the [parameter choice for pbkdf2](https://cryptosense.com/parameter-choice-for-pbkdf2/) + * for more information. 
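A sketch of the `Keychain` DEK shape described here, supplied as a config override when the node is created; the passphrase and PBKDF2 parameters are placeholders rather than recommended values, and the `pass` option is assumed to be accepted by `create()`:

```js
const IPFS = require('ipfs-core')

async function main () {
  const ipfs = await IPFS.create({
    // passphrase protecting the keychain (placeholder value)
    pass: 'this-is-an-example-passphrase-please-change-it',
    config: {
      Keychain: {
        DEK: {
          keyLength: 64, // placeholder PBKDF2 parameters
          iterationCount: 10000,
          salt: 'an-example-salt-of-16-chars',
          hash: 'sha2-512'
        }
      }
    }
  })

  console.log(await ipfs.config.get('Keychain'))
  await ipfs.stop()
}

main().catch(console.error)
```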
+ * @property {DEK} DEK + * + * @typedef {Object} DEK + * @property {number} keyLength + * @property {number} iterationCount + * @property {string} salt + * @property {string} hash + * + * @typedef {Object} PubsubConfig + * Options for configuring the pubsub subsystem. It is important pointing out + * that this is not supported in the browser. If you want to configure a + * different pubsub router in the browser you must configure + * `libp2p.modules.pubsub` options instead. + * + * @property {PubSubRouter} [Router='gossipsub'] + * @property {boolean} [Enabled=true] + * + * @typedef {'gossipsub'|'floodsub'} PubSubRouter + * A string value for specifying which pubsub routing protocol to use. You can + * either use `'gossipsub'` in order to use the [ChainSafe/gossipsub-js] + * (https://github.com/ChainSafe/gossipsub-js) implementation, or `'floodsub'` + * to use the [libp2p/js-libp2p-floodsub](https://github.com/libp2p/js-libp2p-floodsub) + * implementation. + * + * You can read more about these implementations on the [libp2p/specs/pubsub] + * (https://github.com/libp2p/specs/tree/master/pubsub) document. + * + * @typedef {Object} SwarmConfig + * Options for configuring the swarm. + * @property {ConnMgrConfig} [ConnMgr] + * + * @typedef {Object} ConnMgrConfig + * The connection manager determines which and how many connections to keep and + * can be configured to keep. + * + * The "basic" connection manager tries to keep between `LowWater` and + * `HighWater` connections. It works by: + * + * 1. Keeping all connections until `HighWater` connections is reached. + * 2. Once `HighWater` is reached, it closes connections until `LowWater` is + * reached. + * + * @property {number} [LowWater=200] - The minimum number of connections to + * maintain. + * @property {number} [HighWater=500] - The number of connections that, when + * exceeded, will trigger a connection GC operation. 
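A sketch of tuning the `ConnMgr` watermarks described above through `config.set`, mirroring what the `lowpower` profile writes; assumes an `ipfs-core` node and illustrative limits:

```js
const IPFS = require('ipfs-core')

async function main () {
  const ipfs = await IPFS.create()

  // the same watermarks the lowpower profile writes
  await ipfs.config.set('Swarm.ConnMgr', { LowWater: 20, HighWater: 40 })
  console.log(await ipfs.config.get('Swarm.ConnMgr'))

  // as with config.replace, nothing is restarted automatically:
  // the node must be restarted for the new limits to take effect
  await ipfs.stop()
}

main().catch(console.error)
```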
+ */
diff --git a/packages/ipfs-core/src/components/dag/get.js b/packages/ipfs-core/src/components/dag/get.js
index e3d1adf7d0..b0b305ea91 100644
--- a/packages/ipfs-core/src/components/dag/get.js
+++ b/packages/ipfs-core/src/components/dag/get.js
@@ -5,8 +5,61 @@ const first = require('it-first')
 const last = require('it-last')
 const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path')

+/**
+ * @param {Object} config
+ * @param {import('..').IPLD} config.ipld
+ * @param {import('..').Preload} config.preload
+ */
 module.exports = ({ ipld, preload }) => {
-  return withTimeoutOption(async function get (ipfsPath, options = {}) {
+  /**
+   * Retrieve an IPLD format node
+   *
+   * @param {CID} ipfsPath - A DAG node that follows one of the supported IPLD formats
+   * @param {GetOptions & AbortOptions} [options] - An optional configuration
+   * @returns {Promise}
+   * @example
+   * ```JavaScript
+   * // example obj
+   * const obj = {
+   *   a: 1,
+   *   b: [1, 2, 3],
+   *   c: {
+   *     ca: [5, 6, 7],
+   *     cb: 'foo'
+   *   }
+   * }
+   *
+   * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' })
+   * console.log(cid.toString())
+   * // zdpuAmtur968yprkhG9N5Zxn6MFVoqAWBbhUAkNLJs2UtkTq5
+   *
+   * async function getAndLog(cid, path) {
+   *   const result = await ipfs.dag.get(cid, { path })
+   *   console.log(result.value)
+   * }
+   *
+   * await getAndLog(cid, '/a')
+   * // Logs:
+   * // 1
+   *
+   * await getAndLog(cid, '/b')
+   * // Logs:
+   * // [1, 2, 3]
+   *
+   * await getAndLog(cid, '/c')
+   * // Logs:
+   * // {
+   * //   ca: [5, 6, 7],
+   * //   cb: 'foo'
+   * // }
+   *
+   * await getAndLog(cid, '/c/ca/1')
+   * // Logs:
+   * // 6
+   * ```
+   */
+  const get = async function get (ipfsPath, options = {}) {
     const {
       cid,
       path
@@ -21,16 +74,34 @@ module.exports = ({ ipld, preload }) => {
     }

     if (options.path) {
-      if (options.localResolve) {
-        return first(ipld.resolve(cid, options.path))
-      }
-
-      return last(ipld.resolve(cid, options.path))
+      const result = options.localResolve
+        /** @type {DagEntry} - first will return undefined if empty */
+        ? 
(await first(ipld.resolve(cid, options.path))) + /** @type {DagEntry} - last will return undefined if empty */ + : (await last(ipld.resolve(cid, options.path))) + return result } return { value: await ipld.get(cid, options), remainderPath: '' } - }) + } + + return withTimeoutOption(get) } + +/** + * @typedef {Object} GetOptions + * @property {boolean} [localResolve=false] + * @property {number} [timeout] + * @property {boolean} [preload=false] + * @property {string} [path] - An optional path within the DAG to resolve + * + * @typedef {Object} DagEntry + * @property {Object} value + * @property {string} remainderPath + * + * @typedef {import('..').CID} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/dag/put.js b/packages/ipfs-core/src/components/dag/put.js index 92a270ad23..dc2e786f1d 100644 --- a/packages/ipfs-core/src/components/dag/put.js +++ b/packages/ipfs-core/src/components/dag/put.js @@ -1,13 +1,39 @@ 'use strict' const multicodec = require('multicodec') + +/** + * @param {string} name + * @returns {number} + */ const nameToCodec = name => multicodec[name.toUpperCase().replace(/-/g, '_')] const { withTimeoutOption } = require('../../utils') +/** + * @param {Object} config + * @param {import('..').IPLD} config.ipld + * @param {import("..").Pin} config.pin + * @param {import("..").GCLock} config.gcLock + * @param {import("..").Preload} config.preload + */ module.exports = ({ ipld, pin, gcLock, preload }) => { - return withTimeoutOption(async function put (dagNode, options) { - options = options || {} - + /** + * Store an IPLD format node + * + * @param {Object} dagNode + * @param {PutOptions & AbortOptions} [options] + * @returns {Promise} + * @example + * ```js + * const obj = { simple: 'object' } + * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha3-512' }) + * + * console.log(cid.toString()) + * // zBwWX9ecx5F4X54WAjmFLErnBT6ByfNxStr5ovowTL7AhaUR98RWvXPS1V3HqV1qs3r5Ec5ocv7eCdbqYQREXNUfYNuKG + * ``` + */ + // eslint-disable-next-line complexity + async function put (dagNode, options = {}) { if (options.cid && (options.format || options.hashAlg)) { throw new Error('Can\'t put dag node. 
Please provide either `cid` OR `format` and `hashAlg` options.') } else if (((options.format && !options.hashAlg) || (!options.format && options.hashAlg))) { @@ -68,5 +94,21 @@ module.exports = ({ ipld, pin, gcLock, preload }) => { release() } } - }) + } + + return withTimeoutOption(put) } + +/** + * @typedef {Object} PutOptions + * @property {CID} [cid] + * @property {string|number} [format] + * @property {string|number} [hashAlg] + * + * @property {boolean} [pin=false] + * @property {number} [version] + * @property {boolean} [preload=false] + * + * @typedef {import('..').CID} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/dag/resolve.js b/packages/ipfs-core/src/components/dag/resolve.js index c9e8460e11..30a7809e9f 100644 --- a/packages/ipfs-core/src/components/dag/resolve.js +++ b/packages/ipfs-core/src/components/dag/resolve.js @@ -4,8 +4,44 @@ const CID = require('cids') const { withTimeoutOption } = require('../../utils') const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path') +/** + * @param {Object} config + * @param {import('..').IPLD} config.ipld + * @param {import('..').Preload} config.preload + */ module.exports = ({ ipld, preload }) => { - return withTimeoutOption(async function resolve (ipfsPath, options = {}) { + /** + * Returns the CID and remaining path of the node at the end of the passed IPFS path + * + * @param {CID|string} ipfsPath + * @param {ResolveOptions & AbortOptions} options + * @returns {Promise} + * @example + * ```JavaScript + * // example obj + * const obj = { + * a: 1, + * b: [1, 2, 3], + * c: { + * ca: [5, 6, 7], + * cb: 'foo' + * } + * } + * + * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + * console.log(cid.toString()) + * // bafyreicyer3d34cutdzlsbe2nqu5ye62mesuhwkcnl2ypdwpccrsecfmjq + * + * const result = await ipfs.dag.resolve(`${cid}/c/cb`) + * console.log(result) + * // Logs: + * // { + * // cid: CID(bafyreicyer3d34cutdzlsbe2nqu5ye62mesuhwkcnl2ypdwpccrsecfmjq), + * // remainderPath: 'c/cb' + * // } + * ``` + */ + async function resolve (ipfsPath, options = {}) { const { cid, path @@ -52,5 +88,21 @@ module.exports = ({ ipld, preload }) => { cid: lastCid, remainderPath: lastRemainderPath || '' } - }) + } + + return withTimeoutOption(resolve) } + +/** + * @typedef {Object} ResolveOptions + * @property {string} [path] - If `ipfsPath` is a `CID`, you may pass a path here + * @property {boolean} [preload] + * + * @typedef {Object} ResolveResult + * @property {CID} cid - The last CID encountered during the traversal + * @property {string} remainderPath - The path to the end of the IPFS path + * inside the node referenced by the CID + * + * @typedef {import('..').CID} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/dag/tree.js b/packages/ipfs-core/src/components/dag/tree.js index 0f0c223df1..3beedd768e 100644 --- a/packages/ipfs-core/src/components/dag/tree.js +++ b/packages/ipfs-core/src/components/dag/tree.js @@ -3,8 +3,51 @@ const { withTimeoutOption } = require('../../utils') const toCidAndPath = require('ipfs-core-utils/src/to-cid-and-path') +/** + * @param {Object} config + * @param {import('..').IPLD} config.ipld + * @param {import("..").Preload} config.preload + */ module.exports = ({ ipld, preload }) => { - return withTimeoutOption(async function * tree (ipfsPath, options = {}) { // eslint-disable-line require-await + /** + * Enumerate all the entries in a 
graph + * + * @param {CID} ipfsPath - A DAG node that follows one of the supported IPLD formats + * @param {TreeOptions & AbortOptions} [options] + * @returns {AsyncIterable} + * @example + * ```js + * // example obj + * const obj = { + * a: 1, + * b: [1, 2, 3], + * c: { + * ca: [5, 6, 7], + * cb: 'foo' + * } + * } + * + * const cid = await ipfs.dag.put(obj, { format: 'dag-cbor', hashAlg: 'sha2-256' }) + * console.log(cid.toString()) + * // zdpuAmtur968yprkhG9N5Zxn6MFVoqAWBbhUAkNLJs2UtkTq5 + * + * const result = await ipfs.dag.tree('zdpuAmtur968yprkhG9N5Zxn6MFVoqAWBbhUAkNLJs2UtkTq5') + * console.log(result) + * // Logs: + * // a + * // b + * // b/0 + * // b/1 + * // b/2 + * // c + * // c/ca + * // c/ca/0 + * // c/ca/1 + * // c/ca/2 + * // c/cb + * ``` + */ + async function * tree (ipfsPath, options = {}) { // eslint-disable-line require-await const { cid, path @@ -19,5 +62,21 @@ module.exports = ({ ipld, preload }) => { } yield * ipld.tree(cid, options.path, options) - }) + } + + return withTimeoutOption(tree) } + +/** + * @typedef {Object} TreeOptions + * @property {string} [path] - If `ipfsPath` is a `CID`, you may pass a path here + * @property {boolean} [preload] + * + * @typedef {Object} TreeResult + * @property {CID} cid - The last CID encountered during the traversal + * @property {string} remainderPath - The path to the end of the IPFS path + * inside the node referenced by the CID + * + * @typedef {import('..').CID} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/dht.js b/packages/ipfs-core/src/components/dht.js index 0f746715ea..f53865dabb 100644 --- a/packages/ipfs-core/src/components/dht.js +++ b/packages/ipfs-core/src/components/dht.js @@ -6,32 +6,17 @@ const errCode = require('err-code') const { withTimeoutOption } = require('../utils') module.exports = ({ libp2p, repo }) => { - return { + const { get, put, findProvs, findPeer, provide, query } = { /** * Given a key, query the DHT for its best value. * - * @param {Uint8Array} key - * @param {Object} [options] - get options - * @param {number} [options.timeout] - optional timeout + * @param {Uint8Array|string} key + * @param {AbortOptions} [options] - The key associated with the value to find * @returns {Promise} */ - get: withTimeoutOption(async (key, options) => { // eslint-disable-line require-await - options = options || {} - - if (!(key instanceof Uint8Array)) { - try { - key = key.toString().split('/') - .filter(part => part && part !== 'ipfs' && part !== 'ipns') - .shift() - - key = (new CID(key)).bytes - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - } - - return libp2p._dht.get(key, options) - }), + async get (key, options = {}) { // eslint-disable-line require-await + return libp2p._dht.get(normalizeCID(key), options) + }, /** * Write a key/value pair to the DHT. 
@@ -42,63 +27,63 @@ module.exports = ({ libp2p, repo }) => { * * @param {Uint8Array} key * @param {Uint8Array} value - * @returns {Promise} + * @param {AbortOptions} [options] + * @returns {AsyncIterable} */ - put: withTimeoutOption(async (key, value) => { // eslint-disable-line require-await - if (!(key instanceof Uint8Array)) { - try { - key = key.toString().split('/') - .filter(part => part && part !== 'ipfs' && part !== 'ipns') - .shift() - - key = (new CID(key)).bytes - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - } - - return libp2p._dht.put(key, value) - }), + put (key, value, options) { + return libp2p._dht.put(normalizeCID(key), value) + }, /** - * Find peers in the DHT that can provide a specific value, given a key. + * Find peers in the DHT that can provide a specific value, given a CID. * - * @param {CID} key - They key to find providers for. - * @param {Object} [options] - findProviders options - * @param {number} [options.timeout] - how long the query should maximally run, in milliseconds (default: 60000) - * @param {number} [options.numProviders] - maximum number of providers to find - * @returns {AsyncIterable<{ id: CID, addrs: Multiaddr[] }>} + * @param {CID} cid - They key to find providers for. + * @param {FindProvsOptions & AbortOptions} [options] - findProviders options + * @returns {AsyncIterable} + * + * @example + * ```js + * const providers = ipfs.dht.findProvs('QmdPAhQRxrDKqkGPvQzBvjYe3kU8kiEEAd2J6ETEamKAD9') + * for await (const provider of providers) { + * console.log(provider.id.toString()) + * } + * ``` */ - findProvs: withTimeoutOption(async function * (key, options) { // eslint-disable-line require-await - options = options || {} - - if (typeof key === 'string') { - try { - key = new CID(key) - } catch (err) { - throw errCode(err, 'ERR_INVALID_CID') - } - } - + async * findProvs (cid, options = {}) { if (options.numProviders) { options.maxNumProviders = options.numProviders } - for await (const peer of libp2p._dht.findProviders(key, options)) { + for await (const peer of libp2p._dht.findProviders(normalizeCID(cid), options)) { yield { id: peer.id.toB58String(), addrs: peer.addrs } } - }), + }, /** * Query the DHT for all multiaddresses associated with a `PeerId`. * - * @param {PeerId} peerId - The id of the peer to search for. + * @param {PeerId|CID} peerId - The id of the peer to search for. + * @param {AbortOptions} [options] * @returns {Promise<{id: string, addrs: Multiaddr[]}>} + * @example + * ```js + * const info = await ipfs.dht.findPeer('QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt') + * + * console.log(info.id) + * // QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt + * + * info.addrs.forEach(addr => console.log(addr.toString())) + * // '/ip4/147.75.94.115/udp/4001/quic' + * // '/ip6/2604:1380:3000:1f00::1/udp/4001/quic' + * // '/dnsaddr/bootstrap.libp2p.io' + * // '/ip6/2604:1380:3000:1f00::1/tcp/4001' + * // '/ip4/147.75.94.115/tcp/4001' + * ``` */ - findPeer: withTimeoutOption(async peerId => { // eslint-disable-line require-await + async findPeer (peerId, options) { // eslint-disable-line require-await if (typeof peerId === 'string') { peerId = PeerId.createFromCID(peerId) } @@ -109,19 +94,17 @@ module.exports = ({ libp2p, repo }) => { id: peer.id.toB58String(), addrs: peer.multiaddrs } - }), + }, /** * Announce to the network that we are providing given values. * * @param {CID|CID[]} cids - The keys that should be announced. 
-   * @param {Object} [options] - provide options
-   * @param {bool} [options.recursive=false] - Provide not only the given object but also all objects linked from it.
-   * @returns {Promise}
+   * @param {ProvideOptions & AbortOptions} [options] - provide options
+   * @returns {AsyncIterable}
    */
-  provide: withTimeoutOption(async function * (cids, options) {
+  async * provide (cids, options = {}) {
    cids = Array.isArray(cids) ? cids : [cids]
-   options = options || {}

    for (var i in cids) {
      if (typeof cids[i] === 'string') {
@@ -149,15 +132,16 @@ for (const cid of cids) {
      yield libp2p._dht.provide(cid)
    }
-  }),
+  },

   /**
    * Find the closest peers to a given `PeerId`, by querying the DHT.
    *
    * @param {string|PeerId} peerId - The `PeerId` to run the query against.
+   * @param {AbortOptions} [options]
    * @returns {AsyncIterable<{ id: CID, addrs: Multiaddr[] }>}
    */
-  query: withTimeoutOption(async function * (peerId) {
+  async * query (peerId, options) {
    if (typeof peerId === 'string') {
      peerId = PeerId.createFromCID(peerId)
    }
@@ -168,6 +152,63 @@ addrs: [] // TODO: get addrs?
      }
    }
-  })
+  }
+ }
+
+ return {
+   get: withTimeoutOption(get),
+   put: withTimeoutOption(put),
+   findProvs: withTimeoutOption(findProvs),
+   findPeer: withTimeoutOption(findPeer),
+   provide: withTimeoutOption(provide),
+   query: withTimeoutOption(query)
  }
 }
+
+/**
+ * Turns the given CID in some stringifyable representation into its Uint8Array
+ * representation. Throws an error if the given value isn't a valid CID.
+ *
+ * @param {any} cid
+ * @returns {Uint8Array}
+ */
+const parseCID = cid => {
+  try {
+    const cidStr = cid.toString().split('/')
+      .filter(part => part && part !== 'ipfs' && part !== 'ipns')[0]
+
+    return (new CID(cidStr)).bytes
+  } catch (error) {
+    throw errCode(error, 'ERR_INVALID_CID')
+  }
+}
+
+/**
+ * Turns the given CID in any supported representation into its Uint8Array
+ * representation.
+ *
+ * @param {any} cid
+ */
+const normalizeCID = cid =>
+  cid instanceof Uint8Array ? cid : parseCID(cid)
+
+/**
+ * @typedef {Object} QueryEvent
+ * @property {PeerId} id
+ * @property {number} type
+ * @property {string} extra
+ * @property {PeerInfo[]} responses
+ *
+ * @typedef {Object} ProvideOptions
+ * @property {boolean} [recursive=false] - Provide not only the given object but also all objects linked from it.
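+ *
+ * A usage sketch for `provide` (assumes the node already has the block for the
+ * example CID used with `findProvs` above):
+ * ```js
+ * for await (const message of ipfs.dht.provide('QmdPAhQRxrDKqkGPvQzBvjYe3kU8kiEEAd2J6ETEamKAD9')) {
+ *   console.log(message)
+ * }
+ * ```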
+ * + * @typedef {Object} FindProvsOptions + * @property {number} [numProviders] - maximum number of providers to find + * @property {number} [maxNumProviders] + * + * @typedef {Object} PeerInfo + * @property {PeerId} id + * @property {Multiaddr[]} addrs + * + * @typedef {import('multiaddr')} Multiaddr + * @typedef {import('../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/dns.js b/packages/ipfs-core/src/components/dns.js index 6a76890389..0c77219e11 100644 --- a/packages/ipfs-core/src/components/dns.js +++ b/packages/ipfs-core/src/components/dns.js @@ -4,6 +4,10 @@ const dns = require('../runtime/dns-nodejs') const { withTimeoutOption } = require('../utils') +/** + * @param {string} domain + * @returns {string} + */ function fqdnFixups (domain) { // Allow resolution of .eth names via .eth.link // More context at the go-ipfs counterpart: https://github.com/ipfs/go-ipfs/pull/6448 @@ -14,15 +18,31 @@ function fqdnFixups (domain) { } module.exports = () => { - return withTimeoutOption(async (domain, opts) => { // eslint-disable-line require-await - opts = opts || {} - + /** + * Resolve DNS links + * + * @param {string} domain + * @param {DNSOptions} [options] + * @returns {Promise} + */ + const resolveDNS = async (domain, options = {}) => { // eslint-disable-line require-await if (typeof domain !== 'string') { throw new Error('Invalid arguments, domain must be a string') } domain = fqdnFixups(domain) - return dns(domain, opts) - }) + return dns(domain, options) + } + + return withTimeoutOption(resolveDNS) } + +/** + * @typedef {DNSSettings & AbortOptions} DNSOptions + * + * @typedef {Object} DNSSettings + * @property {boolean} [recursive=true] - Resolve until result is not a domain name + * + * @typedef {import('../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/files/chmod.js b/packages/ipfs-core/src/components/files/chmod.js index c814b11f5c..b23174ee78 100644 --- a/packages/ipfs-core/src/components/files/chmod.js +++ b/packages/ipfs-core/src/components/files/chmod.js @@ -1,6 +1,6 @@ 'use strict' -const applyDefaultOptions = require('./utils/apply-default-options') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const toMfsPath = require('./utils/to-mfs-path') const log = require('debug')('ipfs:mfs:touch') const errCode = require('err-code') @@ -97,7 +97,7 @@ function parseSymbolicMode (input, originalMode, isDirectory) { } let [ - _, // eslint-disable-line no-unused-vars + , references, operator, mode @@ -149,7 +149,11 @@ function parseSymbolicMode (input, originalMode, isDirectory) { } function calculateMode (mode, metadata) { - if (typeof mode === 'string' || mode instanceof String) { + if (mode instanceof String) { + mode = mode.toString() + } + + if (typeof mode === 'string') { if (mode.match(/^\d+$/g)) { mode = parseInt(mode, 8) } else { @@ -163,8 +167,14 @@ function calculateMode (mode, metadata) { } module.exports = (context) => { - return withTimeoutOption(async function mfsChmod (path, mode, options) { - options = applyDefaultOptions(options, defaultOptions) + /** + * @param {string} path + * @param {string | number} mode + * @param {ChmodOptions & AbortOptions} [options] + * @returns {Promise} + */ + async function mfsChmod (path, mode, options = {}) { + const opts = mergeOptions(defaultOptions, options) log(`Fetching stats for ${path}`) @@ -172,13 +182,13 @@ module.exports = (context) => { cid, mfsDirectory, name - } = await toMfsPath(context, path, options) + } = await 
toMfsPath(context, path, opts) if (cid.codec !== 'dag-pb') { throw errCode(new Error(`${path} was not a UnixFS node`), 'ERR_NOT_UNIXFS') } - if (options.recursive) { + if (opts.recursive) { // recursively export from root CID, change perms of each entry then reimport // but do not reimport files, only manipulate dag-pb nodes const root = await pipe( @@ -195,12 +205,12 @@ module.exports = (context) => { } }, (source) => importer(source, context.block, { - ...options, + ...opts, pin: false, - dagBuilder: async function * (source, block, options) { + dagBuilder: async function * (source, block, opts) { for await (const entry of source) { yield async function () { - const cid = await persist(entry.content.serialize(), block, options) + const cid = await persist(entry.content.serialize(), block, opts) return { cid, @@ -216,10 +226,10 @@ module.exports = (context) => { ) // remove old path from mfs - await rm(context)(path, options) + await rm(context)(path, opts) // add newly created tree to mfs at path - await cp(context)(`/ipfs/${root.cid}`, path, options) + await cp(context)(`/ipfs/${root.cid}`, path, opts) return } @@ -231,11 +241,11 @@ module.exports = (context) => { const updatedCid = await context.ipld.put(node, mc.DAG_PB, { cidVersion: cid.version, - hashAlg: mh.names[options.hashAlg], - onlyHash: !options.flush + hashAlg: mh.names[opts.hashAlg || defaultOptions.hashAlg], + onlyHash: !opts.flush }) - const trail = await toTrail(context, mfsDirectory, options) + const trail = await toTrail(context, mfsDirectory) const parent = trail[trail.length - 1] const parentNode = await context.ipld.get(parent.cid) @@ -244,17 +254,31 @@ module.exports = (context) => { name: name, cid: updatedCid, size: node.serialize().length, - flush: options.flush, - hashAlg: options.hashAlg, + flush: opts.flush, + hashAlg: opts.hashAlg, cidVersion: cid.version }) parent.cid = result.cid // update the tree with the new child - const newRootCid = await updateTree(context, trail, options) + const newRootCid = await updateTree(context, trail, opts) // Update the MFS record with the new CID for the root of the tree - await updateMfsRoot(context, newRootCid, options) - }) + await updateMfsRoot(context, newRootCid, opts) + } + + return withTimeoutOption(mfsChmod) } + +/** + * @typedef {Object} ChmodOptions + * @property {boolean} [flush=false] + * @property {number} [shardSplitThreshold=1000] + * @property {string} [hashAlg=sha2-256] + * @property {0|1} [cidVersion=0] + * @property {boolean} [recursive=false] + * + * @typedef {import('cids')} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/files/cp.js b/packages/ipfs-core/src/components/files/cp.js index 4481c78902..aaee076f72 100644 --- a/packages/ipfs-core/src/components/files/cp.js +++ b/packages/ipfs-core/src/components/files/cp.js @@ -21,8 +21,15 @@ const defaultOptions = { signal: undefined } -module.exports = (context) => { - return withTimeoutOption(async function mfsCp (...args) { +/** + * @param {any} context + */ +module.exports = function derp (context) { + /** + * @param {[...from:From, options?:CpOptions]} args + * @returns {Promise} + */ + async function mfsCp (...args) { let { sources, destination, @@ -83,7 +90,7 @@ module.exports = (context) => { } const destinationPath = isDirectory(destination) ? 
destination.mfsPath : destination.mfsDirectory - const trail = await toTrail(context, destinationPath, options) + const trail = await toTrail(context, destinationPath) if (sources.length === 1) { const source = sources.pop() @@ -96,7 +103,9 @@ module.exports = (context) => { log('Multiple sources, wrapping in a directory') return copyToDirectory(context, sources, destination, trail, options) - }) + } + + return withTimeoutOption(mfsCp) } const isDirectory = (destination) => { @@ -158,3 +167,17 @@ const addSourceToParent = async (context, source, childName, parent, options) => return parent } + +/** + * @typedef {Object} CpOptions + * @property {boolean} [flush=false] + * @property {number} [shardSplitThreshold=1000] + * @property {string} [hashAlg=sha2-256] + * @property {0|1} [cidVersion=0] + * @property {boolean} [parents=false] + * + * @typedef {import('./utils/types').Tuple} From + * + * @typedef {import('..').CID} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/files/flush.js b/packages/ipfs-core/src/components/files/flush.js index f3c5798cc2..d3d57983a0 100644 --- a/packages/ipfs-core/src/components/files/flush.js +++ b/packages/ipfs-core/src/components/files/flush.js @@ -1,19 +1,34 @@ 'use strict' -const applyDefaultOptions = require('./utils/apply-default-options') const stat = require('./stat') const { withTimeoutOption } = require('../../utils') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const defaultOptions = { + timeout: undefined, signal: undefined } module.exports = (context) => { - return withTimeoutOption(async function mfsFlush (path, options = {}) { - options = applyDefaultOptions(options, defaultOptions) + /** + * Flush a given path's data to disk + * + * @param {string} path + * @param {AbortOptions} [options] + * @returns {Promise} The CID of the path that has been flushed + */ + async function mfsFlush (path, options = {}) { + options = mergeOptions(defaultOptions, options) const { cid } = await stat(context)(path, options) return cid - }) + } + + return withTimeoutOption(mfsFlush) } + +/** + * @typedef {import('cids')} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/files/index.js b/packages/ipfs-core/src/components/files/index.js index c1acf3e0ba..e5f29180de 100644 --- a/packages/ipfs-core/src/components/files/index.js +++ b/packages/ipfs-core/src/components/files/index.js @@ -3,6 +3,21 @@ const createLock = require('./utils/create-lock') const isIpfs = require('is-ipfs') +/** + * @typedef {Object} MFS + * @property {ReturnType} stat + * @property {ReturnType} chmod + * @property {ReturnType} cp + * @property {ReturnType} flush + * @property {ReturnType} mkdir + * @property {ReturnType} mv + * @property {ReturnType} rm + * @property {ReturnType} touch + * @property {ReturnType} write + * @property {ReturnType} read + * @property {ReturnType} ls + */ + // These operations are read-locked at the function level and will execute simultaneously const readOperations = { stat: require('./stat') @@ -76,6 +91,16 @@ function createMfs (options) { return mfs } +/** + * @param {Object} context + * @param {import('..').IPLD} context.ipld + * @param {import('..').IPLDBlock} context.block + * @param {import('..').IPFSBlockService} context.blockService + * @param {import('..').IPFSRepo} context.repo + * @param {import('..').Preload} context.preload + * @param {import('../init').ConstructorOptions} 
context.options + * @returns {MFS} + */ module.exports = ({ ipld, block, blockService, repo, preload, options: constructorOptions }) => { const methods = createMfs({ ipld, @@ -100,179 +125,16 @@ module.exports = ({ ipld, block, blockService, repo, preload, options: construct return { ...methods, - - /** - * Change file mode - * - * @param {string} path - The path of the source to modify. - * @param {Object} mode - The mode to set the path - * @param {Object} [opts] - Options for modification. - * @param {boolean} [opts.recursive=false] - Whether to change modes recursively. (default: false) - * @param {boolean} [opts.flush=true] - Whether or not to immediately flush MFS changes to disk (default: true). - * @param {number} [opts.shardSplitThreshold] - If the modified path has more than this many links it will be turned into a HAMT shard - * @returns {Promise} - */ chmod: methods.chmod, - - /** - * Copy files - * - * @param {string | string[]} from - The path(s) of the source to copy. - * @param {string} to - The path of the destination to copy to. - * @param {Object} [opts] - Options for copy. - * @param {boolean} [opts.parents=false] - Whether or not to make the parent directories if they don't exist. (default: false) - * @param {string} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb) - * @param {string} [opts.hashAlg=sha2-256] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} - * @param {boolean} [opts.flush=true] - Whether or not to immediately flush MFS changes to disk (default: true). - * @returns {Promise} - */ cp: withPreload(methods.cp), - - /** - * Make a directory - * - * @param {string} path - The path to the directory to make. - * @param {Object} [opts] - Options for mkdir. - * @param {boolean} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) - * @param {string} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb). - * @param {string} [opts.hashAlg] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} - * @param {boolean} [opts.flush=true] - Whether or not to immediately flush MFS changes to disk (default: true). - * @returns {Promise} - */ mkdir: methods.mkdir, - - /** - * @typedef {Object} StatOutput - * @property {string} hash - Output hash. - * @property {number} size - File size in bytes. - * @property {number} cumulativeSize - Integer with the size of the DAGNodes making up the file in Bytes. - * @property {string} type - Output type either 'directory' or 'file'. - * @property {number} blocks - If type is directory, this is the number of files in the directory. If it is file it is the number of blocks that make up the file. - * @property {boolean} withLocality - Indicate if locality information is present. - * @property {boolean} local - Indicate if the queried dag is fully present locally. - * @property {number} sizeLocal - Integer indicating the cumulative size of the data present locally. - */ - - /** - * Get file or directory status. - * - * @param {string} path - Path to the file or directory to stat. - * @param {Object} [opts] - Options for stat. 
- * @param {boolean} [opts.hash=false] - Return only the hash. (default: false) - * @param {boolean} [opts.size=false] - Return only the size. (default: false) - * @param {boolean} [opts.withLocal=false] - Compute the amount of the dag that is local, and if possible the total size. (default: false) - * @returns {Promise} - */ stat: withPreload(methods.stat), - - /** - * Remove a file or directory. - * - * @param {string | string[]} paths - One or more paths to remove. - * @param {Object} [opts] - Options for remove. - * @param {boolean} [opts.recursive=false] - Whether or not to remove directories recursively. (default: false) - * @returns {Promise} - */ rm: methods.rm, - - /** - * @typedef {Object} ReadOptions - * @property {number} [opts.offset=0] - Integer with the byte offset to begin reading from (default: 0). - * @property {number} [opts.length] - Integer with the maximum number of bytes to read (default: Read to the end of stream). - */ - - /** - * Read a file into a Buffer. - * - * @param {string} path - Path of the file to read and must point to a file (and not a directory). - * @param {ReadOptions} [opts] - Object for read. - * @returns {AsyncIterable} - */ read: withPreload(methods.read), - - /** - * Update modification time - * - * @param {string} path - The path of the source to modify. - * @param {number} mtime - Time to use as the new modification time in seconds since (+ve) or before (-ve) the Unix Epoch - * @param {Object} [opts] - Options for touch. - * @param {boolean} [opts.parents=false] - Whether or not to make the parent directories if they don't exist. (default: false) - * @param {number} [opts.cidVersion=0] - CID version to use with the newly updated node - * @param {number} [opts.shardSplitThreshold] - If the modified path has more than this many links it will be turned into a HAMT shard - * @returns {Promise} - */ touch: methods.touch, - - /** - * Write to a file. - * - * @param {string} path - Path of the file to write. - * @param {Buffer | PullStream | ReadableStream | Blob | string} content - Content to write. - * @param {Object} opts - Options for write. - * @param {number} [opts.offset=0] - Integer with the byte offset to begin writing at. (default: 0) - * @param {boolean} [opts.create=false] - Indicate to create the file if it doesn't exist. (default: false) - * @param {boolean} [opts.truncate=false] - Indicate if the file should be truncated after writing all the bytes from content. (default: false) - * @param {boolena} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) - * @param {number} [opts.length] - Maximum number of bytes to read. (default: Read all bytes from content) - * @param {boolean} [opts.rawLeaves=false] - If true, DAG leaves will contain raw file data and not be wrapped in a protobuf. (default: false) - * @param {number} [opts.cidVersion=0] - The CID version to use when storing the data (storage keys are based on the CID, including its version). (default: 0) - * @returns {Promise} - */ write: methods.write, - - /** - * Move files. - * - * @param {string | Array} from - Path(s) of the source to move. - * @param {string} to - Path of the destination to move to. - * @param {Object} opts - Options for mv. - * @param {boolean} [opts.parents=false] - Value to decide whether or not to make the parent directories if they don't exist. (default: false) - * @param {string} [opts.format=dag-pb] - Format of nodes to write any newly created directories as. (default: dag-pb). 
- * @param {string} [opts.hashAlg] - Algorithm to use when creating CIDs for newly created directories. (default: sha2-256) {@link https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343 The list of all possible values} - * @param {boolean} [opts.flush=true] - Value to decide whether or not to immediately flush MFS changes to disk. (default: true) - * @returns {Promise} - * @description - * If from has multiple values then to must be a directory. - * - * If from has a single value and to exists and is a directory, from will be moved into to. - * - * If from has a single value and to exists and is a file, from must be a file and the contents of to will be replaced with the contents of from otherwise an error will be returned. - * - * If from is an IPFS path, and an MFS path exists with the same name, the IPFS path will be chosen. - * - * All values of from will be removed after the operation is complete unless they are an IPFS path. - */ mv: withPreload(methods.mv), - - /** - * Flush a given path's data to the disk. - * - * @param {string | Array} [paths] - String paths to flush. (default: /) - * @returns {Promise} - */ flush: methods.flush, - - /** - * @typedef {Object} ListOutputFile - * @property {string} name - Which is the file's name. - * @property {string} type - Which is the object's type (directory or file). - * @property {number} size - The size of the file in bytes. - * @property {string} hash - The hash of the file. - */ - - /** - * @typedef {Object} ListOptions - * @property {boolean} [long=false] - Value to decide whether or not to populate type, size and hash. (default: false) - * @property {boolean} [sort=false] - If true entries will be sorted by filename. (default: false) - */ - - /** - * List directories in the local mutable namespace. - * - * @param {string} [path="/"] - String to show listing for. (default: /) - * @param {ListOptions} [opts] - Options for list. 
- * @returns {AsyncIterable} - */ ls: withPreload(async function * (...args) { for await (const file of methods.ls(...args)) { yield { ...file, size: file.size || 0 } diff --git a/packages/ipfs-core/src/components/files/ls.js b/packages/ipfs-core/src/components/files/ls.js index 0792efddf0..195002025f 100644 --- a/packages/ipfs-core/src/components/files/ls.js +++ b/packages/ipfs-core/src/components/files/ls.js @@ -7,6 +7,10 @@ const { withTimeoutOption } = require('../../utils') +/** + * @param {*} fsEntry + * @returns {UnixFSEntry} + */ const toOutput = (fsEntry) => { let type = 0 let size = fsEntry.node.size || fsEntry.node.length @@ -39,7 +43,23 @@ const toOutput = (fsEntry) => { } module.exports = (context) => { - return withTimeoutOption(async function * mfsLs (path, options = {}) { + /** + * List directories in the local mutable namespace + * + * @param {string} path + * @param {AbortOptions} [options] + * @returns {AsyncIterable} + * @example + * + * ```js + * for await (const file of ipfs.files.ls('/screenshots')) { + * console.log(file.name) + * } + * // 2018-01-22T18:08:46.775Z.png + * // 2018-01-22T18:08:49.184Z.png + * ``` + */ + async function * mfsLs (path, options = {}) { const mfsPath = await toMfsPath(context, path, options) const fsDir = await exporter(mfsPath.mfsPath, context.ipld) @@ -54,5 +74,25 @@ module.exports = (context) => { for await (const fsEntry of fsDir.content(options)) { yield toOutput(fsEntry) } - }) + } + + return withTimeoutOption(mfsLs) } + +/** + * @typedef {import('cids')} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + * + * @typedef {object} UnixTimeObj + * @property {number} secs - the number of seconds since (positive) or before + * (negative) the Unix Epoch began + * @property {number} [nsecs] - the number of nanoseconds since the last full + * second. 
+ * + * @typedef {object} UnixFSEntry + * @property {CID} cid + * @property {number} [mode] + * @property {UnixTimeObj} [mtime] + * @property {number} size + * @property {number} type + */ diff --git a/packages/ipfs-core/src/components/files/mkdir.js b/packages/ipfs-core/src/components/files/mkdir.js index ab60f7fcb0..be6a59760a 100644 --- a/packages/ipfs-core/src/components/files/mkdir.js +++ b/packages/ipfs-core/src/components/files/mkdir.js @@ -9,7 +9,7 @@ const updateMfsRoot = require('./utils/update-mfs-root') const updateTree = require('./utils/update-tree') const addLink = require('./utils/add-link') const withMfsRoot = require('./utils/with-mfs-root') -const applyDefaultOptions = require('./utils/apply-default-options') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const { withTimeoutOption } = require('../../utils') const defaultOptions = { @@ -24,8 +24,19 @@ const defaultOptions = { } module.exports = (context) => { - return withTimeoutOption(async function mfsMkdir (path, options) { - options = applyDefaultOptions(options, defaultOptions) + /** + * Make a directory in your MFS + * + * @param {string} path + * @param {MkdirOptions & AbortOptions} [options] + * @returns {Promise} + * @example + * ```js + * await ipfs.files.mkdir('/my/beautiful/directory') + * ``` + */ + async function mfsMkdir (path, options = {}) { + const opts = mergeOptions(defaultOptions, options) if (!path) { throw new Error('no path given to Mkdir') @@ -34,7 +45,7 @@ module.exports = (context) => { path = path.trim() if (path === '/') { - if (options.parents) { + if (opts.parents) { return } @@ -53,10 +64,10 @@ module.exports = (context) => { throw errCode(new Error("path cannot have the prefix 'ipfs'"), 'ERR_INVALID_PATH') } - const root = await withMfsRoot(context, options) + const root = await withMfsRoot(context, opts) let parent const trail = [] - const emptyDir = await createNode(context, 'directory', options) + const emptyDir = await createNode(context, 'directory', opts) // make sure the containing folder exists, creating it if necessary for (let i = 0; i <= pathComponents.length; i++) { @@ -69,7 +80,7 @@ module.exports = (context) => { log(`${subPath} had children ${parent.node.Links.map(link => link.Name)}`) if (i === pathComponents.length) { - if (options.parents) { + if (opts.parents) { return } @@ -82,12 +93,12 @@ module.exports = (context) => { }) } catch (err) { if (err.code === 'ERR_NOT_FOUND') { - if (i < pathComponents.length && !options.parents) { + if (i < pathComponents.length && !opts.parents) { throw errCode(new Error(`Intermediate directory path ${subPath} does not exist, use the -p flag to create it`), 'ERR_NOT_FOUND') } // add the intermediate directory - await addEmptyDir(context, subPathComponents[subPathComponents.length - 1], emptyDir, trail[trail.length - 1], trail, options) + await addEmptyDir(context, subPathComponents[subPathComponents.length - 1], emptyDir, trail[trail.length - 1], trail, opts) } else { throw err } @@ -98,11 +109,13 @@ module.exports = (context) => { // await addEmptyDir(context, pathComponents[pathComponents.length - 1], emptyDir, parent, trail) // update the tree from the leaf to the root - const newRootCid = await updateTree(context, trail, options) + const newRootCid = await updateTree(context, trail, opts) // Update the MFS record with the new CID for the root of the tree - await updateMfsRoot(context, newRootCid, options) - }) + await updateMfsRoot(context, newRootCid, opts) + } + + return withTimeoutOption(mfsMkdir) } 
const addEmptyDir = async (context, childName, emptyDir, parent, trail, options) => { @@ -126,3 +139,18 @@ const addEmptyDir = async (context, childName, emptyDir, parent, trail, options) cid: emptyDir.cid }) } + +/** + * @typedef {Object} MkdirOptions + * @property {boolean} [parents=false] - If true, create intermediate directories + * @property {number} [mode] - An integer that represents the file mode + * @property {Mtime|Hrtime|Date} [mtime] - A Date object, an object with `{ secs, nsecs }` properties where secs is the number of seconds since (positive) or before (negative) the Unix Epoch began and nsecs is the number of nanoseconds since the last full second, or the output of `process.hrtime() + * @property {boolean} [flush] - If true the changes will be immediately flushed to disk + * @property {string} [hashAlg='sha2-256'] - The hash algorithm to use for any updated entries + * @property {0|1} [cidVersion=0] - The CID version to use for any updated entries + * + * @typedef {import('cids')} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('../../utils').Mtime} Mtime + * @typedef {import('../../utils').Hrtime} Hrtime + */ diff --git a/packages/ipfs-core/src/components/files/mv.js b/packages/ipfs-core/src/components/files/mv.js index 4735d72c3e..5f65e96e77 100644 --- a/packages/ipfs-core/src/components/files/mv.js +++ b/packages/ipfs-core/src/components/files/mv.js @@ -15,8 +15,17 @@ const defaultOptions = { signal: undefined } +/** + * + * @param {any} context + */ module.exports = (context) => { - return withTimeoutOption(async function mfsMv (...args) { + /** + * + * @param {[...from:From, to:string, options?:MvOptions]} args + * @returns {Promise} + */ + async function mfsMv (...args) { const { sources, options @@ -35,5 +44,19 @@ module.exports = (context) => { await cp(context).apply(null, cpArgs) await rm(context).apply(null, rmArgs) - }) + } + + return withTimeoutOption(mfsMv) } + +/** + * @typedef {Object} MvOptions + * @property {boolean} [parents=false] + * @property {boolean} [flush=false] + * @property {string} [hashAlg='sha2-256'] + * @property {0|1} [cidVersion] + * + * @typedef {import('./utils/types').Tuple} From + * @typedef {import('cids')} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/files/read.js b/packages/ipfs-core/src/components/files/read.js index 3bdfbbddc3..e5cf791e51 100644 --- a/packages/ipfs-core/src/components/files/read.js +++ b/packages/ipfs-core/src/components/files/read.js @@ -1,7 +1,7 @@ 'use strict' const exporter = require('ipfs-unixfs-exporter') -const applyDefaultOptions = require('./utils/apply-default-options') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const toMfsPath = require('./utils/to-mfs-path') const errCode = require('err-code') const { withTimeoutOption } = require('../../utils') @@ -12,9 +12,30 @@ const defaultOptions = { signal: undefined } +/** + * @param {any} context + */ module.exports = (context) => { - return withTimeoutOption(function mfsRead (path, options = {}) { - options = applyDefaultOptions(options, defaultOptions) + /** + * Read a file + * + * @param {string | CID} path - An MFS path, IPFS Path or CID to read + * @param {ReadOptions & AbortOptions} [options] + * @returns {AsyncIterable} + * @example + * ```js + * const chunks = [] + * + * for await (const chunk of ipfs.files.read('/hello-world')) { + * chunks.push(chunk) + * } + * + * 
console.log(uint8ArrayConcat(chunks).toString()) + * // Hello, World! + * ``` + */ + function mfsRead (path, options = {}) { + options = mergeOptions(defaultOptions, options) return { [Symbol.asyncIterator]: async function * read () { @@ -37,5 +58,16 @@ module.exports = (context) => { } } } - }) + } + + return withTimeoutOption(mfsRead) } + +/** + * @typedef {Object} ReadOptions + * @property {number} [offset] - An offset to start reading the file from + * @property {number} [length] - An optional max length to read from the file + * + * @typedef {import('cids')} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/files/rm.js b/packages/ipfs-core/src/components/files/rm.js index 68a58aaca6..859203665c 100644 --- a/packages/ipfs-core/src/components/files/rm.js +++ b/packages/ipfs-core/src/components/files/rm.js @@ -17,8 +17,17 @@ const defaultOptions = { signal: undefined } +/** + * @param {any} context + */ module.exports = (context) => { - return withTimeoutOption(async function mfsRm (...args) { + /** + * Remove a file or directory + * + * @param {[...paths: Paths, options?:RmOptions]} args + * @returns {Promise} + */ + async function mfsRm (...args) { const { sources, options @@ -37,13 +46,16 @@ module.exports = (context) => { for (const source of sources) { await removePath(context, source.path, options) } - }) + } + + return withTimeoutOption(mfsRm) } const removePath = async (context, path, options) => { const mfsPath = await toMfsPath(context, path, options) - const trail = await toTrail(context, mfsPath.mfsPath, options) - const child = trail.pop() + const trail = await toTrail(context, mfsPath.mfsPath) + const child = trail[trail.length - 1] + trail.pop() const parent = trail[trail.length - 1] if (!parent) { @@ -72,3 +84,15 @@ const removePath = async (context, path, options) => { // Update the MFS record with the new CID for the root of the tree await updateMfsRoot(context, newRootCid, options) } + +/** + * @typedef {Object} RmOptions + * @property {boolean} [recursive=false] - If true all paths under the specifed path(s) will be removed + * @property {boolean} [flush=false] - If true the changes will be immediately flushed to disk + * @property {string} [hashAlg='sha2-256'] - The hash algorithm to use for any updated entries + * @property {0|1} [cidVersion] - The CID version to use for any updated entries + * + * @typedef {import('..').CID} CID + * @typedef {import('./utils/types').Tuple} Paths + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/files/stat.js b/packages/ipfs-core/src/components/files/stat.js index 5cba1eee6a..6a038b57f5 100644 --- a/packages/ipfs-core/src/components/files/stat.js +++ b/packages/ipfs-core/src/components/files/stat.js @@ -1,6 +1,6 @@ 'use strict' -const applyDefaultOptions = require('./utils/apply-default-options') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const toMfsPath = require('./utils/to-mfs-path') const exporter = require('ipfs-unixfs-exporter') const log = require('debug')('ipfs:mfs:stat') @@ -12,9 +12,20 @@ const defaultOptions = { signal: undefined } +/** + * @param {Object} context + * @param {import('..').IPLD} context.ipld + */ module.exports = (context) => { - return withTimeoutOption(async function mfsStat (path, options) { - options = applyDefaultOptions(options, defaultOptions) + /** + * Get file or directory statistics + * + * @param {string} path - The MFS path 
return statistics from + * @param {StatOptions & AbortOptions} [options] + * @returns {Promise} - An object containing the file/directory status + */ + async function mfsStat (path, options) { + options = mergeOptions(defaultOptions, options) log(`Fetching stats for ${path}`) @@ -41,11 +52,18 @@ module.exports = (context) => { throw new Error(`Cannot stat codec ${file.cid.codec}`) } - return statters[file.cid.codec](file, options) - }) + return statters[file.cid.codec](file) + } + + return withTimeoutOption(mfsStat) } +/** @type {Record Stat>} */ const statters = { + /** + * @param {any} file + * @returns {Stat} + */ raw: (file) => { return { cid: file.cid, @@ -58,13 +76,19 @@ const statters = { withLocality: false } }, + /** + * @param {any} file + * @returns {Stat} + */ 'dag-pb': (file) => { const blocks = file.node.Links.length const size = file.node.size const cumulativeSize = file.node.size + /** @type {Stat} */ const output = { cid: file.cid, + type: 'file', size: size, cumulativeSize: cumulativeSize, blocks: blocks, @@ -101,7 +125,13 @@ const statters = { return output }, + /** + * @param {any} file + * @returns {Stat} + */ 'dag-cbor': (file) => { + // @ts-ignore - This is incompatible with Stat object + // @TODO - https://github.com/ipfs/js-ipfs/issues/3325 return { cid: file.cid, local: undefined, @@ -109,6 +139,10 @@ const statters = { withLocality: false } }, + /** + * @param {any} file + * @returns {Stat} + */ identity: (file) => { return { cid: file.cid, @@ -122,3 +156,32 @@ const statters = { } } } + +/** + * @typedef {Object} StatOptions + * @property {boolean} [hash=false] - If true, return only the CID + * @property {boolean} [size=false] - If true, return only the size + * @property {boolean} [withLocal=false] - If true, compute the amount of the DAG that is local and if possible the total size + * + * @typedef {Object} Stat + * @property {CID} cid - Content idenntifier + * @property {number} size - An integer with the file size in bytes. + * @property {number} cumulativeSize - An integer with the size of the + * DAGNodes making up the file in bytes. + * @property {'directory'|'file'} type - Type of the file which is either directory + * or file. + * @property {number} blocks - If type is directory, this is the number of files + * in the directory. If it is file it is the number of blocks that make up the + * file. + * @property {boolean} [withLocality] - A boolean to indicate if locality + * information is present. + * @property {boolean} [local] - Is a boolean to indicate if the queried dag is + * fully present locally. + * @property {number} [sizeLocal] - An integer indicating the cumulative size of + * the data present locally. 
+ * @property {number} [mode] - File mode + * @property {import('../add-all').MTime} [mtime] - Modification time + * + * @typedef {import('..').CID} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/files/touch.js b/packages/ipfs-core/src/components/files/touch.js index 3e4809ffa3..77f2eda055 100644 --- a/packages/ipfs-core/src/components/files/touch.js +++ b/packages/ipfs-core/src/components/files/touch.js @@ -1,6 +1,6 @@ 'use strict' -const applyDefaultOptions = require('./utils/apply-default-options') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const toMfsPath = require('./utils/to-mfs-path') const log = require('debug')('ipfs:mfs:touch') const errCode = require('err-code') @@ -15,6 +15,7 @@ const mh = require('multihashing-async').multihash const { withTimeoutOption } = require('../../utils') const defaultOptions = { + /** @type {UnixTime|undefined} */ mtime: undefined, flush: true, shardSplitThreshold: 1000, @@ -24,35 +25,51 @@ const defaultOptions = { } module.exports = (context) => { - return withTimeoutOption(async function mfsTouch (path, options) { - options = options || {} - options = applyDefaultOptions(options, defaultOptions) - options.mtime = options.mtime || new Date() - - log(`Touching ${path} mtime: ${options.mtime}`) + /** + * Update the mtime of a file or directory + * + * @param {string} path - The MFS path to update the mtime for + * @param {TouchOptions & AbortOptions} [options] + * @returns {Promise} + * + * @example + * ```js + * // set the mtime to the current time + * await ipfs.files.touch('/path/to/file.txt') + * // set the mtime to a specific time + * await ipfs.files.touch('/path/to/file.txt', { + * mtime: new Date('May 23, 2014 14:45:14 -0700') + * }) + * ``` + */ + async function mfsTouch (path, options = {}) { + const settings = mergeOptions(defaultOptions, options) + settings.mtime = settings.mtime || new Date() + + log(`Touching ${path} mtime: ${settings.mtime}`) const { cid, mfsDirectory, name, exists - } = await toMfsPath(context, path, options) + } = await toMfsPath(context, path, settings) let node let updatedCid - let cidVersion = options.cidVersion + let cidVersion = settings.cidVersion if (!exists) { const metadata = new UnixFS({ type: 'file', - mtime: options.mtime + mtime: settings.mtime }) node = new DAGNode(metadata.marshal()) updatedCid = await context.ipld.put(node, mc.DAG_PB, { - cidVersion: options.cidVersion, + cidVersion: settings.cidVersion, hashAlg: mh.names['sha2-256'], - onlyHash: !options.flush + onlyHash: !settings.flush }) } else { if (cid.codec !== 'dag-pb') { @@ -64,18 +81,18 @@ module.exports = (context) => { node = await context.ipld.get(cid) const metadata = UnixFS.unmarshal(node.Data) - metadata.mtime = options.mtime + metadata.mtime = settings.mtime node = new DAGNode(metadata.marshal(), node.Links) updatedCid = await context.ipld.put(node, mc.DAG_PB, { cidVersion: cid.version, hashAlg: mh.names['sha2-256'], - onlyHash: !options.flush + onlyHash: !settings.flush }) } - const trail = await toTrail(context, mfsDirectory, options) + const trail = await toTrail(context, mfsDirectory) const parent = trail[trail.length - 1] const parentNode = await context.ipld.get(parent.cid) @@ -84,8 +101,8 @@ module.exports = (context) => { name: name, cid: updatedCid, size: node.serialize().length, - flush: options.flush, - shardSplitThreshold: options.shardSplitThreshold, + flush: settings.flush, + shardSplitThreshold: 
settings.shardSplitThreshold, hashAlg: 'sha2-256', cidVersion }) @@ -93,9 +110,25 @@ module.exports = (context) => { parent.cid = result.cid // update the tree with the new child - const newRootCid = await updateTree(context, trail, options) + const newRootCid = await updateTree(context, trail, settings) // Update the MFS record with the new CID for the root of the tree - await updateMfsRoot(context, newRootCid, options) - }) + await updateMfsRoot(context, newRootCid, settings) + } + + return withTimeoutOption(mfsTouch) } + +/** + * @typedef {Object} TouchOptions + * @property {UnixTime} [mtime] - A Date object, an object with `{ secs, nsecs }` properties where secs is the number of seconds since (positive) or before (negative) the Unix Epoch began and nsecs is the number of nanoseconds since the last full second, or the output of `process.hrtime()` + * @property {boolean} [flush=false] - If true the changes will be immediately flushed to disk + * @property {string} [hashAlg='sha2-256'] - The hash algorithm to use for any updated entries + * @property {0|1} [cidVersion] - The CID version to use for any updated entries + * + * @typedef {import('cids')} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('../../utils').Mtime} Mtime + * @typedef {import('../../utils').Hrtime} Hrtime + * @typedef {import('ipfs-core-utils/src/files/normalise-input/normalise-input').UnixTime} UnixTime + */ diff --git a/packages/ipfs-core/src/components/files/utils/apply-default-options.js b/packages/ipfs-core/src/components/files/utils/apply-default-options.js deleted file mode 100644 index 6c88e5a1f9..0000000000 --- a/packages/ipfs-core/src/components/files/utils/apply-default-options.js +++ /dev/null @@ -1,46 +0,0 @@ -'use strict' - -const errCode = require('err-code') - -module.exports = (options = {}, defaults) => { - if (Array.isArray(options)) { - options = options.filter(arg => typeof arg === 'object').pop() || {} - } - - const output = {} - - for (const key in defaults) { - if (options[key] !== null && options[key] !== undefined) { - output[key] = options[key] - } else { - output[key] = defaults[key] - } - } - - // support legacy go arguments - if (options.count !== undefined) { - output.length = options.count - } - - if (options.p !== undefined) { - output.parents = options.p - } - - if (options.l !== undefined) { - output.long = options.l - } - - if (!output.length && output.length !== 0) { - output.length = Infinity - } - - if (output.offset < 0) { - throw errCode(new Error('cannot have negative write offset'), 'ERR_INVALID_PARAMS') - } - - if (output.length < 0) { - throw errCode(new Error('cannot have negative byte count'), 'ERR_INVALID_PARAMS') - } - - return output -} diff --git a/packages/ipfs-core/src/components/files/utils/to-async-iterator.js b/packages/ipfs-core/src/components/files/utils/to-async-iterator.js index 6ed0f1b837..2d5b5ff3da 100644 --- a/packages/ipfs-core/src/components/files/utils/to-async-iterator.js +++ b/packages/ipfs-core/src/components/files/utils/to-async-iterator.js @@ -67,7 +67,7 @@ const toAsyncIterator = (content) => { resolve({ done: false, - value: new Uint8Array(reader.result, reader.result.byteOffset, reader.result.byteLength) + value: new Uint8Array(/** @type {ArrayBuffer} */(reader.result)) }) } diff --git a/packages/ipfs-core/src/components/files/utils/to-sources.js b/packages/ipfs-core/src/components/files/utils/to-sources.js index ed3d583f5c..256e0440f8 100644 --- 
a/packages/ipfs-core/src/components/files/utils/to-sources.js +++ b/packages/ipfs-core/src/components/files/utils/to-sources.js @@ -1,7 +1,7 @@ 'use strict' const toMfsPath = require('./to-mfs-path') -const applyDefaultOptions = require('./apply-default-options') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) async function toSources (context, args, defaultOptions) { const sources = [] @@ -16,7 +16,7 @@ async function toSources (context, args, defaultOptions) { } } - options = applyDefaultOptions(options, defaultOptions) + options = mergeOptions(defaultOptions, options) return { sources: await toMfsPath(context, sources, options), diff --git a/packages/ipfs-core/src/components/files/utils/types.ts b/packages/ipfs-core/src/components/files/utils/types.ts new file mode 100644 index 0000000000..9d22f87a08 --- /dev/null +++ b/packages/ipfs-core/src/components/files/utils/types.ts @@ -0,0 +1,12 @@ +/** + * Helper type to represet monomorphic tuples with up to 8 items. + */ +export type Tuple = + | [T] + | [T, T] + | [T, T, T] + | [T, T, T, T] + | [T, T, T, T, T] + | [T, T, T, T, T, T] + | [T, T, T, T, T, T, T] + | [T, T, T, T, T, T, T, T] diff --git a/packages/ipfs-core/src/components/files/utils/update-tree.js b/packages/ipfs-core/src/components/files/utils/update-tree.js index 1eab4c6c05..dc78800482 100644 --- a/packages/ipfs-core/src/components/files/utils/update-tree.js +++ b/packages/ipfs-core/src/components/files/utils/update-tree.js @@ -51,9 +51,11 @@ const updateTree = async (context, trail, options) => { } } - log(`Final CID ${child.cid}`) + // @ts-ignore - child is possibly undefined + const { cid } = child + log(`Final CID ${cid}`) - return child.cid + return cid } module.exports = updateTree diff --git a/packages/ipfs-core/src/components/files/write.js b/packages/ipfs-core/src/components/files/write.js index a768cba977..67b341406f 100644 --- a/packages/ipfs-core/src/components/files/write.js +++ b/packages/ipfs-core/src/components/files/write.js @@ -5,7 +5,7 @@ const importer = require('ipfs-unixfs-importer') const stat = require('./stat') const mkdir = require('./mkdir') const addLink = require('./utils/add-link') -const applyDefaultOptions = require('./utils/apply-default-options') +const mergeOptions = require('merge-options').bind({ ignoreUndefined: true }) const createLock = require('./utils/create-lock') const toAsyncIterator = require('./utils/to-async-iterator') const toMfsPath = require('./utils/to-mfs-path') @@ -41,27 +41,39 @@ const defaultOptions = { } module.exports = (context) => { - return withTimeoutOption(async function mfsWrite (path, content, options) { - options = applyDefaultOptions(options, defaultOptions) + /** + * Write to an MFS path + * + * @param {string} path - The MFS path where you will write to + * @param {string|Uint8Array|AsyncIterable|Blob} content - The content to write to the path + * @param {WriteOptions & AbortOptions} [options] + * @returns {Promise} + */ + async function mfsWrite (path, content, options = {}) { + options = mergeOptions(defaultOptions, options) let source, destination, parent log('Reading source, destination and parent') await createLock().readLock(async () => { - source = await toAsyncIterator(content, options) + source = await toAsyncIterator(content) destination = await toMfsPath(context, path, options) parent = await toMfsPath(context, destination.mfsDirectory, options) })() log('Read source, destination and parent') + // @ts-ignore - parent maybe undefined if (!options.parents && 
!parent.exists) { throw errCode(new Error('directory does not exist'), 'ERR_NO_EXIST') } + // @ts-ignore - parent maybe undefined if (!options.create && !destination.exists) { throw errCode(new Error('file does not exist'), 'ERR_NO_EXIST') } return updateOrImport(context, path, source, destination, options) - }) + } + + return withTimeoutOption(mfsWrite) } const updateOrImport = async (context, path, source, destination, options) => { @@ -89,7 +101,7 @@ const updateOrImport = async (context, path, source, destination, options) => { // get an updated mfs path in case the root changed while we were writing const updatedPath = await toMfsPath(context, path, options) - const trail = await toTrail(context, updatedPath.mfsDirectory, options) + const trail = await toTrail(context, updatedPath.mfsDirectory) const parent = trail[trail.length - 1] if (!parent.type.includes('directory')) { @@ -244,7 +256,7 @@ const limitAsyncStreamBytes = (stream, limit) => { } const asyncZeroes = (count, chunkSize = MFS_MAX_CHUNK_SIZE) => { - const buf = new Uint8Array(chunkSize, 0) + const buf = new Uint8Array(chunkSize) const stream = { [Symbol.asyncIterator]: function * _asyncZeroes () { @@ -278,3 +290,22 @@ const countBytesStreamed = async function * (source, notify) { yield buf } } + +/** + * @typedef {Object} WriteOptions + * @property {number} [offset] - An offset to start writing to file at + * @property {number} [length] - Optionally limit how many bytes are read from the stream + * @property {boolean} [create=false] - Create the MFS path if it does not exist + * @property {boolean} [parents=false] - Create intermediate MFS paths if they do not exist + * @property {boolean} [truncate=false] - Truncate the file at the MFS path if it would have been larger than the passed content + * @property {boolean} [rawLeaves=false] - If true, DAG leaves will contain raw file data and not be wrapped in a protobuf + * @property {number} [mode] - An integer that represents the file mode + * @property {Mtime|Hrtime|Date} [mtime] - A Date object, an object with `{ secs, nsecs }` properties where secs is the number of seconds since (positive) or before (negative) the Unix Epoch began and nsecs is the number of nanoseconds since the last full second, or the output of `process.hrtime() + * @property {boolean} [flush] - If true the changes will be immediately flushed to disk + * @property {string} [hashAlg='sha2-256'] - The hash algorithm to use for any updated entries + * @property {0|1} [cidVersion=0] - The CID version to use for any updated entries + * + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('../../utils').Mtime} Mtime + * @typedef {import('../../utils').Hrtime} Hrtime + */ diff --git a/packages/ipfs-core/src/components/get.js b/packages/ipfs-core/src/components/get.js index ed17f942f6..5763f89cb9 100644 --- a/packages/ipfs-core/src/components/get.js +++ b/packages/ipfs-core/src/components/get.js @@ -4,10 +4,20 @@ const exporter = require('ipfs-unixfs-exporter') const errCode = require('err-code') const { normalizeCidPath, mapFile, withTimeoutOption } = require('../utils') +/** + * @param {Object} config + * @param {import('.').IPLD} config.ipld + * @param {import('.').Preload} config.preload + */ module.exports = function ({ ipld, preload }) { - return withTimeoutOption(async function * get (ipfsPath, options) { - options = options || {} - + /** + * Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. 
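A minimal usage sketch for the `get` API documented above, assuming an already-created `ipfs` instance and a placeholder CID (both are assumptions, not part of this change):

```js
// Minimal sketch: stream entries out of IPFS, assuming an existing `ipfs`
// instance and a placeholder CID.
const cid = 'QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u'

for await (const entry of ipfs.get(cid)) {
  console.log(entry.path)

  if (entry.content) {
    // file entries expose their bytes as an async iterable of Uint8Array chunks
    let bytes = 0
    for await (const chunk of entry.content) {
      bytes += chunk.length
    }
    console.log(`read ${bytes} bytes from ${entry.path}`)
  }
}
```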
+ * + * @param {CID|string} ipfsPath - An IPFS path or CID to export + * @param {Options} [options] + * @returns {AsyncIterable} + */ + async function * get (ipfsPath, options = {}) { if (options.preload !== false) { let pathComponents @@ -26,5 +36,18 @@ module.exports = function ({ ipld, preload }) { includeContent: true }) } - }) + } + + return withTimeoutOption(get) } + +/** + * @typedef {GetOptions & AbortOptions} Options + * + * @typedef {Object} GetOptions + * @property {boolean} [preload] + * + * @typedef {import('.').CID} CID + * @typedef {import('../utils').AbortOptions} AbortOptions + * @typedef {import('../utils').IPFSEntry} IPFSEntry + */ diff --git a/packages/ipfs-core/src/components/id.js b/packages/ipfs-core/src/components/id.js index 8f65bb6f3f..226dcc239e 100644 --- a/packages/ipfs-core/src/components/id.js +++ b/packages/ipfs-core/src/components/id.js @@ -6,29 +6,23 @@ const { withTimeoutOption } = require('../utils') const uint8ArrayToString = require('uint8arrays/to-string') /** - * @typedef {object} PeerIdObj - An object with the Peer identity - * @property {string} id - the Peer ID - * @property {string} publicKey - the public key of the peer as a base64 encoded string - * @property {import('multiaddr')[]} addresses - A list of multiaddrs this node is listening on - * @property {string} agentVersion - The agent version - * @property {string} protocolVersion - The supported protocol version - * @property {string[]} protocols - The supported protocols + * @param {Object} config + * @param {import('peer-id')} config.peerId + * @param {import('libp2p')} [config.libp2p] */ - -/** - * Returns the identity of the Peer - * - * @template {Record} ExtraOptions - * @callback Id - * @param {import('../utils').AbortOptions & ExtraOptions} [options] - * @returns {Promise} - */ - module.exports = ({ peerId, libp2p }) => { /** - * @type {Id<{}>} + * Returns the identity of the Peer + * + * @param {import('../utils').AbortOptions} [_options] + * @returns {Promise} + * @example + * ```js + * const identity = await ipfs.id() + * console.log(identity) + * ``` */ - async function id (options) { // eslint-disable-line require-await, @typescript-eslint/no-unused-vars + async function id (_options) { // eslint-disable-line require-await const id = peerId.toB58String() let addresses = [] let protocols = [] @@ -63,3 +57,14 @@ module.exports = ({ peerId, libp2p }) => { } return withTimeoutOption(id) } + +/** + * @typedef {object} PeerId + * The Peer identity + * @property {string} id - the Peer ID + * @property {string} publicKey - the public key of the peer as a base64 encoded string + * @property {import('multiaddr')[]} addresses - A list of multiaddrs this node is listening on + * @property {string} agentVersion - The agent version + * @property {string} protocolVersion - The supported protocol version + * @property {string[]} protocols - The supported protocols + */ diff --git a/packages/ipfs-core/src/components/index.js b/packages/ipfs-core/src/components/index.js index 9911775886..fd4f8c1794 100644 --- a/packages/ipfs-core/src/components/index.js +++ b/packages/ipfs-core/src/components/index.js @@ -1,19 +1,49 @@ 'use strict' +/** + * @typedef {ReturnType} Add + */ exports.add = require('./add') + +/** + * @typedef {ReturnType} AddAll + */ + exports.addAll = require('./add-all') + +/** + * @typedef {Object} Block + * @property {ReturnType} get + * @property {ReturnType} put + * @property {ReturnType} rm + * @property {ReturnType} stat + */ exports.block = { get: require('./block/get'), 
put: require('./block/put'), rm: require('./block/rm'), stat: require('./block/stat') } + +/** + * @typedef {Object} BitSwap + * @property {ReturnType} stat + * @property {ReturnType} unwant + * @property {ReturnType} wantlist + */ exports.bitswap = { stat: require('./bitswap/stat'), unwant: require('./bitswap/unwant'), wantlist: require('./bitswap/wantlist'), wantlistForPeer: require('./bitswap/wantlist-for-peer') } + +/** + * @typedef {Object} Bootstrap + * @property {ReturnType} add + * @property {ReturnType} list + * @property {ReturnType} rm + */ exports.bootstrap = { add: require('./bootstrap/add'), clear: require('./bootstrap/clear'), @@ -21,21 +51,63 @@ exports.bootstrap = { reset: require('./bootstrap/reset'), rm: require('./bootstrap/rm') } + +/** + * @typedef {ReturnType} Cat + */ exports.cat = require('./cat') + +/** + * @typedef {ReturnType} Config + */ exports.config = require('./config') + +/** + * @typedef {Object} DAG + * @property {ReturnType} get + * @property {ReturnType} put + * @property {ReturnType} resolve + * @property {ReturnType} tree + */ exports.dag = { get: require('./dag/get'), put: require('./dag/put'), resolve: require('./dag/resolve'), tree: require('./dag/tree') } + +/** @typedef {ReturnType} DHT */ exports.dht = require('./dht') + +/** @typedef {ReturnType} DNS */ exports.dns = require('./dns') + +/** @typedef {ReturnType} Files */ exports.files = require('./files') + +/** @typedef {ReturnType} Get */ exports.get = require('./get') + +/** @typedef {ReturnType} ID */ exports.id = require('./id') + +/** @typedef {ReturnType} Init */ exports.init = require('./init') + +/** @typedef {ReturnType} IsOnline */ exports.isOnline = require('./is-online') + +/** + * @typedef {Object} Key + * @property {ReturnType} export + * @property {ReturnType} gen + * @property {ReturnType} import + * @property {ReturnType} info + * @property {ReturnType} list + * @property {ReturnType} rename + * @property {ReturnType} rm + */ + exports.key = { export: require('./key/export'), gen: require('./key/gen'), @@ -45,8 +117,25 @@ exports.key = { rename: require('./key/rename'), rm: require('./key/rm') } + +/** @typedef {ReturnType} LibP2P */ exports.libp2p = require('./libp2p') + +/** @typedef {ReturnType} LS */ exports.ls = require('./ls') + +/** + * @typedef {Object} Name + * @property {ReturnType} publish + * @property {ReturnType} resolve + * @property {NamePubSub} pubsub + * + * @typedef {Object} NamePubSub + * @property {ReturnType} cancel + * @property {ReturnType} state + * @property {ReturnType} subs + */ + exports.name = { publish: require('./name/publish'), pubsub: { @@ -56,6 +145,23 @@ exports.name = { }, resolve: require('./name/resolve') } + +/** + * @typedef {Object} ObjectAPI + * @property {ReturnType} data + * @property {ReturnType} get + * @property {ReturnType} links + * @property {ReturnType} new + * @property {ReturnType} put + * @property {ReturnType} stat + * @property {ObjectPath} patch + * + * @typedef {Object} ObjectPath + * @property {ReturnType} addLink + * @property {ReturnType} rmLink + * @property {ReturnType} appendData + * @property {ReturnType} setData + */ exports.object = { data: require('./object/data'), get: require('./object/get'), @@ -70,6 +176,14 @@ exports.object = { put: require('./object/put'), stat: require('./object/stat') } + +/** + * @typedef Pin + * @property {ReturnType} add + * @property {ReturnType} addAll + * @property {ReturnType} ls + * @property {ReturnType} rm + */ exports.pin = { add: require('./pin/add'), addAll: 
require('./pin/add-all'), @@ -77,20 +191,61 @@ rm: require('./pin/rm'), rmAll: require('./pin/rm-all') } + +/** + * @typedef {ReturnType} Ping + */ exports.ping = require('./ping') + +/** + * @typedef {ReturnType} PubSub + */ exports.pubsub = require('./pubsub') + +/** + * @typedef {ReturnType} Refs + * @typedef {ReturnType} LocalRefs + * @typedef {Refs & {local:LocalRefs}} RefsWithLocal + */ exports.refs = Object.assign(require('./refs'), { local: require('./refs/local') }) + +/** + * @typedef {Object} Repo + * @property {ReturnType} gc + * @property {ReturnType} stat + * @property {ReturnType} version + */ exports.repo = { gc: require('./repo/gc'), stat: require('./repo/stat'), version: require('./repo/version') } + +/** @typedef {ReturnType} Resolve */ exports.resolve = require('./resolve') + +/** @typedef {ReturnType} Start */ exports.start = require('./start') + +/** + * @typedef {Object} Stats + * @property {ReturnType} bw + */ exports.stats = { bw: require('./stats/bw') } + +/** @typedef {ReturnType} Stop */ exports.stop = require('./stop') + +/** + * @typedef {Object} Swarm + * @property {ReturnType} addrs + * @property {ReturnType} connect + * @property {ReturnType} disconnect + * @property {ReturnType} localAddrs + * @property {ReturnType} peers + */ exports.swarm = { addrs: require('./swarm/addrs'), connect: require('./swarm/connect'), @@ -98,4 +253,70 @@ localAddrs: require('./swarm/local-addrs'), peers: require('./swarm/peers') } + +/** + * @typedef {ReturnType} Version + */ exports.version = require('./version') + +/** + * @typedef {ReturnType} Preload + * @typedef {RWLock} GCLock + * + * @typedef {Object} RWLock + * @property {() => Promise} readLock + * @property {() => Promise} writeLock + * + * @typedef {() => void} Lock + * + * // External library types + * @typedef {import('cids')} CID + * @typedef {import('peer-id')} PeerId + * @typedef {import('multiaddr')} Multiaddr + * + * // Just pretending these things are typed & hopefully in the future they + * // will be. 
+ * @typedef {import('ipld')} IPLD + * @typedef {import('ipld').Config} IPLDConfig + * @typedef {import('ipld-block')} IPLDBlock + * @typedef {import('ipfs-repo')} IPFSRepo + * @typedef {import('ipfs-block-service')} IPFSBlockService + * @typedef {import('ipfs-bitswap')} IPFSBitSwap + * @typedef {import('libp2p')} LibP2PService + * @typedef {import('libp2p').Config} LibP2PConfig + */ + +/** + * @typedef {Object} IPFSAPI + * @property {Add} add + * @property {BitSwap} bitswap + * @property {Block} block + * @property {Bootstrap} bootstrap + * @property {Cat} cat + * @property {Config} config + * @property {DAG} dag + * @property {DHT} dht + * @property {DNS} dns + * @property {Files} files + * @property {Get} get + * @property {ID} id + * @property {IsOnline} isOnline + * @property {Key} key + * @property {LibP2P} libp2p + * @property {LS} ls + * @property {Name} name + * @property {ObjectAPI} object + * @property {Pin} pin + * @property {Ping} ping + * @property {PubSub} pubsub + * @property {Refs} refs + * @property {Repo} repo + * @property {Resolve} resolve + * @property {Stats} stats + * @property {Swarm} swarm + * @property {Version} version + * + * @property {Init} init + * @property {Start} start + * @property {Stop} stop + */ diff --git a/packages/ipfs-core/src/components/init.js b/packages/ipfs-core/src/components/init.js index d6963091a7..f8ef2a25f7 100644 --- a/packages/ipfs-core/src/components/init.js +++ b/packages/ipfs-core/src/components/init.js @@ -19,8 +19,13 @@ const { NotEnabledError } = require('../errors') const BlockService = require('ipfs-block-service') + +/** + * @typedef {import('.').IPLD} IPLD + */ const Ipld = require('ipld') const getDefaultIpldOptions = require('../runtime/ipld-nodejs') + const createPreloader = require('../preload') const { ERR_REPO_NOT_INITIALIZED } = require('ipfs-repo').errors const IPNS = require('../ipns') @@ -29,157 +34,177 @@ const initAssets = require('../runtime/init-assets-nodejs') const PinManager = require('./pin/pin-manager') const Components = require('./') +/** + * @param {Object} config + * @param {import('../api-manager')} config.apiManager + * @param {(...args:any[]) => void} config.print + * @param {ConstructorOptions} config.options + */ module.exports = ({ apiManager, print, options: constructorOptions -}) => async function init (options) { - const { cancel } = apiManager.update({ init: () => { throw new AlreadyInitializingError() } }) - - try { - options = options || {} - - if (typeof constructorOptions.init === 'object') { - options = mergeOptions(constructorOptions.init, options) - } +}) => +/** + * @param {Object} options + */ + async function init (options = {}) { + const { cancel } = apiManager.update({ init: () => { throw new AlreadyInitializingError() } }) + + try { + if (typeof constructorOptions.init === 'object') { + options = mergeOptions(constructorOptions.init, options) + } - options.pass = options.pass || constructorOptions.pass + options.pass = options.pass || constructorOptions.pass - if (constructorOptions.config) { - options.config = mergeOptions(options.config, constructorOptions.config) - } + if (constructorOptions.config) { + options.config = mergeOptions(options.config, constructorOptions.config) + } - options.repo = options.repo || constructorOptions.repo - options.repoAutoMigrate = options.repoAutoMigrate || constructorOptions.repoAutoMigrate + options.repo = options.repo || constructorOptions.repo + options.repoAutoMigrate = options.repoAutoMigrate || constructorOptions.repoAutoMigrate - const 
repo = typeof options.repo === 'string' || options.repo == null - ? createRepo({ path: options.repo, autoMigrate: options.repoAutoMigrate, silent: constructorOptions.silent }) - : options.repo + const repo = typeof options.repo === 'string' || options.repo == null + ? createRepo({ path: options.repo, autoMigrate: options.repoAutoMigrate, silent: constructorOptions.silent }) + : options.repo - let isInitialized = true + let isInitialized = true - if (repo.closed) { - try { - await repo.open() - } catch (err) { - if (err.code === ERR_REPO_NOT_INITIALIZED) { - isInitialized = false - } else { - throw err + if (repo.closed) { + try { + await repo.open() + } catch (err) { + if (err.code === ERR_REPO_NOT_INITIALIZED) { + isInitialized = false + } else { + throw err + } } } - } - - if (!isInitialized && options.allowNew === false) { - throw new NotEnabledError('new repo initialization is not enabled') - } - const { peerId, keychain } = isInitialized - ? await initExistingRepo(repo, options) - : await initNewRepo(repo, { ...options, print }) - - log('peer created') - - const blockService = new BlockService(repo) - const ipld = new Ipld(getDefaultIpldOptions(blockService, constructorOptions.ipld, log)) - - const preload = createPreloader(constructorOptions.preload) - await preload.start() + if (!isInitialized && options.allowNew === false) { + throw new NotEnabledError('new repo initialization is not enabled') + } - // Make sure GC lock is specific to repo, for tests where there are - // multiple instances of IPFS - const gcLock = mortice(repo.path, { singleProcess: constructorOptions.repoOwner !== false }) - const dag = { - get: Components.dag.get({ ipld, preload }), - resolve: Components.dag.resolve({ ipld, preload }), - tree: Components.dag.tree({ ipld, preload }) - } - const object = { - data: Components.object.data({ ipld, preload }), - get: Components.object.get({ ipld, preload }), - links: Components.object.links({ dag }), - new: Components.object.new({ ipld, preload }), - patch: { - addLink: Components.object.patch.addLink({ ipld, gcLock, preload }), - appendData: Components.object.patch.appendData({ ipld, gcLock, preload }), - rmLink: Components.object.patch.rmLink({ ipld, gcLock, preload }), - setData: Components.object.patch.setData({ ipld, gcLock, preload }) - }, - put: Components.object.put({ ipld, gcLock, preload }), - stat: Components.object.stat({ ipld, preload }) - } + const { peerId, keychain } = isInitialized + ? 
await initExistingRepo(repo, options) + : await initNewRepo(repo, { ...options, print }) + + log('peer created') + + const blockService = new BlockService(repo) + const ipld = new Ipld(getDefaultIpldOptions(blockService, constructorOptions.ipld, log)) + + const preload = createPreloader(constructorOptions.preload) + await preload.start() + + // Make sure GC lock is specific to repo, for tests where there are + // multiple instances of IPFS + const gcLock = mortice(repo.path, { singleProcess: constructorOptions.repoOwner !== false }) + const dag = { + get: Components.dag.get({ ipld, preload }), + resolve: Components.dag.resolve({ ipld, preload }), + tree: Components.dag.tree({ ipld, preload }), + // FIXME: resolve this circular dependency + get put () { + const put = Components.dag.put({ ipld, pin, gcLock, preload }) + Object.defineProperty(this, 'put', { value: put }) + return put + } + } - const pinManager = new PinManager(repo, dag) - const pinAddAll = Components.pin.addAll({ pinManager, gcLock, dag }) - const pinRmAll = Components.pin.rmAll({ pinManager, gcLock, dag }) + const object = { + data: Components.object.data({ ipld, preload }), + get: Components.object.get({ ipld, preload }), + links: Components.object.links({ dag }), + new: Components.object.new({ ipld, preload }), + patch: { + addLink: Components.object.patch.addLink({ ipld, gcLock, preload }), + appendData: Components.object.patch.appendData({ ipld, gcLock, preload }), + rmLink: Components.object.patch.rmLink({ ipld, gcLock, preload }), + setData: Components.object.patch.setData({ ipld, gcLock, preload }) + }, + put: Components.object.put({ ipld, gcLock, preload }), + stat: Components.object.stat({ ipld, preload }) + } - const pin = { - add: Components.pin.add({ addAll: pinAddAll }), - addAll: pinAddAll, - ls: Components.pin.ls({ pinManager, dag }), - rm: Components.pin.rm({ rmAll: pinRmAll }), - rmAll: pinRmAll - } + const pinManager = new PinManager(repo, dag) + const pinAddAll = Components.pin.addAll({ pinManager, gcLock, dag }) + const pinRmAll = Components.pin.rmAll({ pinManager, gcLock, dag }) - // FIXME: resolve this circular dependency - dag.put = Components.dag.put({ ipld, pin, gcLock, preload }) + const pin = { + add: Components.pin.add({ addAll: pinAddAll }), + addAll: pinAddAll, + ls: Components.pin.ls({ pinManager, dag }), + rm: Components.pin.rm({ rmAll: pinRmAll }), + rmAll: pinRmAll + } - const block = { - get: Components.block.get({ blockService, preload }), - put: Components.block.put({ blockService, pin, gcLock, preload }), - rm: Components.block.rm({ blockService, gcLock, pinManager }), - stat: Components.block.stat({ blockService, preload }) - } + const block = { + get: Components.block.get({ blockService, preload }), + put: Components.block.put({ blockService, pin, gcLock, preload }), + rm: Components.block.rm({ blockService, gcLock, pinManager }), + stat: Components.block.stat({ blockService, preload }) + } - const addAll = Components.addAll({ block, preload, pin, gcLock, options: constructorOptions }) + const addAll = Components.addAll({ block, preload, pin, gcLock, options: constructorOptions }) - if (!isInitialized && !options.emptyRepo) { + if (!isInitialized && !options.emptyRepo) { // add empty unixfs dir object (go-ipfs assumes this exists) - const emptyDirCid = await addEmptyDir({ dag, pin }) - - log('adding default assets') - await initAssets({ addAll, print }) + const emptyDirCid = await addEmptyDir({ dag, pin }) - log('initializing IPNS keyspace') - // Setup the offline routing for IPNS. 
- // This is primarily used for offline ipns modifications, such as the initializeKeyspace feature. - const offlineDatastore = new OfflineDatastore(repo) - const ipns = new IPNS(offlineDatastore, repo.datastore, peerId, keychain, { pass: options.pass }) - await ipns.initializeKeyspace(peerId.privKey, emptyDirCid.toString()) - } - - const api = createApi({ - add: Components.add({ addAll }), - addAll, - apiManager, - constructorOptions, - block, - blockService, - dag, - gcLock, - initOptions: options, - ipld, - keychain, - object, - peerId, - pin, - pinManager, - preload, - print, - repo - }) + log('adding default assets') + await initAssets({ addAll, print }) - apiManager.update(api, () => { throw new NotStartedError() }) + log('initializing IPNS keyspace') + // Setup the offline routing for IPNS. + // This is primarily used for offline ipns modifications, such as the initializeKeyspace feature. + const offlineDatastore = new OfflineDatastore(repo) + const ipns = new IPNS(offlineDatastore, repo.datastore, peerId, keychain, { pass: options.pass }) + await ipns.initializeKeyspace(peerId.privKey, emptyDirCid.toString()) + } - /** @type {typeof api} */ - const initializedApi = apiManager.api - return initializedApi - } catch (err) { - cancel() - throw err + const api = createApi({ + add: Components.add({ addAll }), + addAll, + apiManager, + constructorOptions, + block, + blockService, + dag, + gcLock, + initOptions: options, + ipld, + keychain, + object, + peerId, + pin, + pinManager, + preload, + print, + repo + }) + + return apiManager.update(api, () => { throw new NotStartedError() }).api + } catch (err) { + cancel() + throw err + } } -} +/** + * @param {IPFSRepo} repo + * @param {Object} options + * @param {PrivateKey} options.privateKey + * @param {boolean} [options.emptyRepo] + * @param {number} [options.bits=2048] - Number of bits to use in the generated key + * @param {string[]} options.profiles + * @param {IPFSConfig} options.config + * @param {string} [options.pass] + * @param {(...args:any[]) => void} options.print + * @param {KeyType} [options.algorithm='RSA'] + */ async function initNewRepo (repo, { privateKey, emptyRepo, algorithm, bits, profiles, config, pass, print }) { emptyRepo = emptyRepo || false bits = bits == null ? 
2048 : Number(bits) @@ -203,8 +228,6 @@ async function initNewRepo (repo, { privateKey, emptyRepo, algorithm, bits, prof PrivKey: uint8ArrayToString(peerId.privKey.bytes, 'base64pad') } - privateKey = peerId.privKey - log('peer identity: %s', config.Identity.PeerID) await repo.init(config) @@ -233,6 +256,13 @@ async function initNewRepo (repo, { privateKey, emptyRepo, algorithm, bits, prof return { peerId, keychain: libp2p.keychain } } +/** + * @param {IPFSRepo} repo + * @param {Object} options + * @param {IPFSConfig} [options.config] + * @param {string[]} [options.profiles] + * @param {string} [options.pass] + */ async function initExistingRepo (repo, { config: newConfig, profiles, pass }) { let config = await repo.config.getAll() @@ -263,7 +293,14 @@ async function initExistingRepo (repo, { config: newConfig, profiles, pass }) { return { peerId, keychain: libp2p.keychain } } -function createPeerId ({ privateKey, algorithm = 'rsa', bits, print }) { +/** + * @param {Object} options + * @param {KeyType} [options.algorithm='RSA'] + * @param {PrivateKey} options.privateKey + * @param {number} options.bits + * @param {(...args:any[]) => void} options.print + */ +function createPeerId ({ privateKey, algorithm = 'RSA', bits, print }) { if (privateKey) { log('using user-supplied private-key') return typeof privateKey === 'object' @@ -272,6 +309,7 @@ function createPeerId ({ privateKey, algorithm = 'rsa', bits, print }) { } else { // Generate peer identity keypair + transform to desired format + add to config. print('generating %s-bit (rsa only) %s keypair...', bits, algorithm) + // @ts-ignore - expects "Ed25519" | "RSA" | "secp256k1" instoad of string return PeerId.create({ keyType: algorithm, bits }) } } @@ -326,8 +364,9 @@ function createApi ({ } const resolve = Components.resolve({ ipld }) - const refs = Components.refs({ ipld, resolve, preload }) - refs.local = Components.refs.local({ repo }) + const refs = Object.assign(Components.refs({ ipld, resolve, preload }), { + local: Components.refs.local({ repo }) + }) const api = { add, @@ -367,7 +406,7 @@ function createApi ({ pin, refs, repo: { - gc: Components.repo.gc({ gcLock, pin, pinManager, refs, repo }), + gc: Components.repo.gc({ gcLock, pin, refs, repo }), stat: Components.repo.stat({ repo }), version: Components.repo.version({ repo }) }, @@ -404,3 +443,119 @@ function createApi ({ return api } + +/** + * @template {boolean | InitOptions} Init + * @template {boolean} Start + * + * @typedef {Object} ConstructorOptions + * Options argument can be used to specify advanced configuration. + * @property {RepoOption} [repo='~/.jsipfs'] + * @property {boolean} [repoAutoMigrate=true] - `js-ipfs` comes bundled with a + * tool that automatically migrates your IPFS repository when a new version is + * available. + * @property {Init} [init=true] - Perform repo initialization steps when creating + * the IPFS node. + * Note that *initializing* a repo is different from creating an instance of + * [`ipfs.Repo`](https://github.com/ipfs/js-ipfs-repo). The IPFS constructor + * sets many special properties when initializing a repo, so you should usually + * not try and call `repoInstance.init()` yourself. + * @property {Start} [start=true] - If `false`, do not automatically + * start the IPFS node. Instead, you’ll need to manually call + * [`node.start()`](https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs/docs/MODULE.md#nodestart) + * yourself. + * @property {string} [pass=null] - A passphrase to encrypt/decrypt your keys. 
+ * @property {boolean} [silent=false] - Prevents all logging output from the + * IPFS node. (Default: `false`) + * @property {RelayOptions} [relay={ enabled: true, hop: { enabled: false, active: false } }] + * - Configure circuit relay (see the [circuit relay tutorial] + * (https://github.com/ipfs/js-ipfs/tree/master/examples/circuit-relaying) + * to learn more). + * @property {boolean} [offline=false] - Run ipfs node offline. The node does + * not connect to the rest of the network but provides a local API. + * @property {PreloadOptions} [preload] - Configure remote preload nodes. + * The remote will preload content added on this node, and also attempt to + * preload objects requested by this node. + * @property {ExperimentalOptions} [EXPERIMENTAL] - Enable and configure + * experimental features. + * @property {object} [config] - Modify the default IPFS node config. This + * object will be *merged* with the default config; it will not replace it. + * (Default: [`config-nodejs.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/config-nodejs.js) + * in Node.js, [`config-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/config-browser.js) + * in browsers) + * @property {import('.').IPLDConfig} [ipld] - Modify the default IPLD config. This object + * will be *merged* with the default config; it will not replace it. Check IPLD + * [docs](https://github.com/ipld/js-ipld#ipld-constructor) for more information + * on the available options. (Default: [`ipld-nodejs.js`] + * (https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-nodejs.js) in Node.js, [`ipld-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-browser.js) + * in browsers) + * @property {object|Function} [libp2p] - The libp2p option allows you to build + * your libp2p node by configuration, or via a bundle function. If you are + * looking to just modify the below options, using the object format is the + * quickest way to get the default features of libp2p. If you need to create a + * more customized libp2p node, such as with custom transports or peer/content + * routers that need some of the ipfs data on startup, a custom bundle is a + * great way to achieve this. + * - You can see the bundle in action in the [custom libp2p example](https://github.com/ipfs/js-ipfs/tree/master/examples/custom-libp2p). + * - Please see [libp2p/docs/CONFIGURATION.md](https://github.com/libp2p/js-libp2p/blob/master/doc/CONFIGURATION.md) + * for the list of options libp2p supports. + * - Default: [`libp2p-nodejs.js`](../src/core/runtime/libp2p-nodejs.js) + * in Node.js, [`libp2p-browser.js`](../src/core/runtime/libp2p-browser.js) in + * browsers. + * + * @property {boolean} [repoOwner] + */ + +/** + * @typedef {IPFSRepo|string} RepoOption + * The file path at which to store the IPFS node’s data. Alternatively, you + * can set up a customized storage system by providing an `ipfs.Repo` instance. + * + * @example + * ```js + * // Store data outside your user directory + * const node = await IPFS.create({ repo: '/var/ipfs/data' }) + * ``` + * + * @typedef {object} RelayOptions + * @property {boolean} [enabled] - Enable circuit relay dialer and listener. (Default: `true`) + * @property {object} [hop] + * @property {boolean} [hop.enabled] - Make this node a relay (other nodes can connect *through* it). (Default: `false`) + * @property {boolean} [hop.active] - Make this an *active* relay node. 
Active relay nodes will attempt to dial a destination peer even if that peer is not yet connected to the relay. (Default: `false`) + * + * @typedef {object} PreloadOptions + * @property {boolean} [enabled] - Enable content preloading (Default: `true`) + * @property {number} [interval] + * @property {string[]} [addresses] - Multiaddr API addresses of nodes that should preload content. + * **NOTE:** nodes specified here should also be added to your node's bootstrap address list at `config.Bootstrap`. + * + * @typedef {object} ExperimentalOptions + * @property {boolean} [ipnsPubsub] - Enable pub-sub on IPNS. (Default: `false`) + * @property {boolean} [sharding] - Enable directory sharding. Directories that have many child objects will be represented by multiple DAG nodes instead of just one. It can improve lookup performance when a directory has several thousand files or more. (Default: `false`) + * + * @typedef {Object} InitOptions + * @property {boolean} [emptyRepo=false] - Whether to remove built-in assets, + * like the instructional tour and empty mutable file system, from the repo. + * @property {number} [bits=2048] - Number of bits to use in the generated key + * pair (rsa only). + * @property {PrivateKey} [privateKey] - A pre-generated private key to use. + * **NOTE: This overrides `bits`.** + * @property {string} [pass] - A passphrase to encrypt keys. You should + * generally use the top-level `pass` option instead of the `init.pass` + * option (this one will take its value from the top-level option if not set). + * @property {string[]} [profiles] - Apply profile settings to config. + * @property {boolean} [allowNew=true] - Set to `false` to disallow + * initialization if the repo does not already exist. + * @property {IPFSConfig} [config] + * + * @typedef {import('./config').IPFSConfig} IPFSConfig + * @typedef {import('.').IPFSRepo} IPFSRepo + * + * @typedef {'RSA' | 'ed25519' | 'secp256k1'} KeyType + * + * @typedef {string|PeerId} PrivateKey + * Can be either a base64 string or a [PeerId](https://github.com/libp2p/js-peer-id) + * instance. 
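A minimal sketch of how the constructor and init options documented above might be combined, assuming the package's `create` entry point; all values are illustrative:

```js
// Illustrative only: top-level constructor options plus nested `init` options.
const IPFS = require('ipfs-core')

async function main () {
  const node = await IPFS.create({
    repo: '/var/ipfs/data',        // a path, or an ipfs.Repo instance
    silent: true,                  // suppress logging output
    pass: 'a keychain passphrase', // used to encrypt/decrypt keys
    init: {
      emptyRepo: true,             // skip adding the default assets
      profiles: ['test'],          // apply config profiles on init
      allowNew: true
    }
  })

  console.log((await node.id()).id)
  await node.stop()
}

main().catch(console.error)
```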
+ * + * @typedef {import('libp2p').Keychain} Keychain + */ diff --git a/packages/ipfs-core/src/components/is-online.js b/packages/ipfs-core/src/components/is-online.js index 3aad832f57..450b62e153 100644 --- a/packages/ipfs-core/src/components/is-online.js +++ b/packages/ipfs-core/src/components/is-online.js @@ -1,5 +1,8 @@ 'use strict' -module.exports = ({ libp2p }) => { - return () => Boolean(libp2p && libp2p.isStarted()) -} +/** + * @param {Object} config + * @param {import('libp2p')} [config.libp2p] + */ +module.exports = ({ libp2p }) => () => + Boolean(libp2p && libp2p.isStarted()) diff --git a/packages/ipfs-core/src/components/key/gen.js b/packages/ipfs-core/src/components/key/gen.js index a9be31f5cb..e3eaea5ff0 100644 --- a/packages/ipfs-core/src/components/key/gen.js +++ b/packages/ipfs-core/src/components/key/gen.js @@ -3,8 +3,7 @@ const { withTimeoutOption } = require('../../utils') module.exports = ({ keychain }) => { - return withTimeoutOption((name, options) => { - options = options || {} + return withTimeoutOption((name, options = {}) => { return keychain.createKey(name, options.type || 'rsa', options.size || 2048) }) } diff --git a/packages/ipfs-core/src/components/libp2p.js b/packages/ipfs-core/src/components/libp2p.js index ee8453c7cc..32faca1dd2 100644 --- a/packages/ipfs-core/src/components/libp2p.js +++ b/packages/ipfs-core/src/components/libp2p.js @@ -5,17 +5,24 @@ const mergeOptions = require('merge-options') const errCode = require('err-code') const PubsubRouters = require('../runtime/libp2p-pubsub-routers-nodejs') +/** + * @param {Object} config + * @param {import('.').IPFSRepo} config.repo + * @param {Object} [config.options] + * @param {import('.').PeerId} [config.peerId] + * @param {string[]} [config.multiaddrs] + * @param {{pass?:string}} [config.keychainConfig] + * @param {import('.').LibP2PConfig} [config.config] + * @returns {import('.').LibP2PService} + */ module.exports = ({ - options, + options = {}, peerId, multiaddrs = [], repo, keychainConfig = {}, - config + config = {} }) => { - options = options || {} - config = config || {} - const { datastore, keys } = repo const libp2pOptions = getLibp2pOptions({ diff --git a/packages/ipfs-core/src/components/ls.js b/packages/ipfs-core/src/components/ls.js index 286af3339e..e1cb13b2bd 100644 --- a/packages/ipfs-core/src/components/ls.js +++ b/packages/ipfs-core/src/components/ls.js @@ -4,10 +4,20 @@ const exporter = require('ipfs-unixfs-exporter') const errCode = require('err-code') const { normalizeCidPath, mapFile, withTimeoutOption } = require('../utils') +/** + * @param {Object} config + * @param {import('.').IPLD} config.ipld + * @param {import('.').Preload} config.preload + */ module.exports = function ({ ipld, preload }) { - return withTimeoutOption(async function * ls (ipfsPath, options) { - options = options || {} - + /** + * Lists a directory from IPFS that is addressed by a valid IPFS Path. 
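A brief usage sketch for the `ls` component documented above, again assuming an existing `ipfs` instance and a placeholder directory CID:

```js
// Sketch: list the entries under a directory CID (placeholder value).
for await (const file of ipfs.ls('QmQrX8hka2BtNHa8N8arAq16TCVx5qHcb46c5yPewRycLm')) {
  // each entry carries at least a name, path, cid, size and type
  console.log(`${file.type}\t${file.size}\t${file.name}`)
}
```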
+ * + * @param {string|CID} ipfsPath - An IPFS path or CID to list + * @param {Options} options + * @returns {AsyncIterable} + */ + async function * ls (ipfsPath, options = {}) { const path = normalizeCidPath(ipfsPath) const recursive = options.recursive const pathComponents = path.split('/') @@ -50,5 +60,22 @@ module.exports = function ({ ipld, preload }) { } throw errCode(new Error(`Unknown UnixFS type ${file.unixfs.type}`), 'ERR_UNKNOWN_UNIXFS_TYPE') - }) + } + + return withTimeoutOption(ls) } + +/** + * @typedef {import('../utils').IPFSEntry} LSEntry + * + * @typedef {LSOptions & AbortOptions} Options + * + * @typedef {Object} LSOptions + * @property {boolean} [recursive] + * @property {boolean} [preload] + * @property {boolean} [includeContent] + * + * @typedef {import('../utils').AbortOptions} AbortOptions + * + * @typedef {import('.').CID} CID + */ diff --git a/packages/ipfs-core/src/components/name/publish.js b/packages/ipfs-core/src/components/name/publish.js index bf71fda407..217d2e41ca 100644 --- a/packages/ipfs-core/src/components/name/publish.js +++ b/packages/ipfs-core/src/components/name/publish.js @@ -5,21 +5,22 @@ const parseDuration = require('parse-duration').default const crypto = require('libp2p-crypto') const errcode = require('err-code') -const log = debug('ipfs:name:publish') -log.error = debug('ipfs:name:publish:error') +const log = Object.assign(debug('ipfs:name:publish'), { + error: debug('ipfs:name:publish:error') +}) const { OFFLINE_ERROR, normalizePath, withTimeoutOption } = require('../../utils') const { resolvePath } = require('./utils') -/** - * @typedef { import("../index") } IPFS - */ - /** * IPNS - Inter-Planetary Naming System * - * @param {IPFS} self - * @returns {Object} + * @param {Object} config + * @param {import('../../ipns')} config.ipns + * @param {import('../index').DAG} config.dag + * @param {import('peer-id')} config.peerId + * @param {import('../index').IsOnline} config.isOnline + * @param {import('../init').Keychain} config.keychain */ module.exports = ({ ipns, dag, peerId, isOnline, keychain }) => { const lookupKey = async keyName => { @@ -45,21 +46,20 @@ module.exports = ({ ipns, dag, peerId, isOnline, keychain }) => { * which is the hash of its public key. * * @param {string} value - ipfs path of the object to be published. - * @param {Object} options - ipfs publish options. - * @param {boolean} options.resolve - resolve given path before publishing. - * @param {string} options.lifetime - time duration that the record will be valid for. - This accepts durations such as "300s", "1.5h" or "2h45m". Valid time units are - "ns", "ms", "s", "m", "h". Default is 24h. - * @param {string} options.ttl - time duration this record should be cached for (NOT IMPLEMENTED YET). - * This accepts durations such as "300s", "1.5h" or "2h45m". Valid time units are - "ns", "ms", "s", "m", "h" (caution: experimental). - * @param {string} options.key - name of the key to be used, as listed by 'ipfs key list -l'. - * @param {function(Error)} [callback] - * @returns {Promise|void} + * @param {PublishOptions} [options] + * @returns {Promise} + * @example + * ```js + * // The address of your files. + * const addr = '/ipfs/QmbezGequPwcsWo8UL4wDF6a8hYwM1hmbzYv2mnKkEWaUp' + * const res = await ipfs.name.publish(addr) + * // You now have a res which contains two fields: + * // - name: the name under which the content was published. + * // - value: the "real" address to which Name points. 
+ * console.log(`https://gateway.ipfs.io/ipns/${res.name}`) + * ``` */ - return withTimeoutOption(async function publish (value, options) { - options = options || {} - + async function publish (value, options = {}) { const resolve = !(options.resolve === false) const lifetime = options.lifetime || '24h' const key = options.key || 'self' @@ -78,12 +78,12 @@ module.exports = ({ ipns, dag, peerId, isOnline, keychain }) => { throw err } - let pubLifetime + let pubLifetime = 0 try { - pubLifetime = parseDuration(lifetime) + pubLifetime = parseDuration(lifetime) || 0 // Calculate lifetime with nanoseconds precision - pubLifetime = pubLifetime.toFixed(6) + pubLifetime = parseFloat(pubLifetime.toFixed(6)) } catch (err) { log.error(err) throw err @@ -98,5 +98,30 @@ module.exports = ({ ipns, dag, peerId, isOnline, keychain }) => { // Start publishing process return ipns.publish(results[0], value, pubLifetime) - }) + } + + return withTimeoutOption(publish) } + +/** + * @typedef {PublishSettings & AbortOptions} PublishOptions + * ipfs publish options. + * + * @typedef {Object} PublishSettings + * @property {boolean} [resolve=true] - Resolve given path before publishing. + * @property {string} [lifetime='24h'] - Time duration of the record. + * @property {string} [ttl] - Time duration this record should be cached. + * @property {string} [key=self] - Name of the key to be used. + * @property {boolean} [allowOffline=true] - When offline, save the IPNS record + * to the the local datastore without broadcasting to the network instead of + * simply failing. + * + * This option is not yet implemented in js-ipfs. See tracking issue [ipfs/js-ipfs#1997] + * (https://github.com/ipfs/js-ipfs/issues/1997). + * + * @typedef {Object} PublishResult + * @property {string} name + * @property {string} value + * + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/name/pubsub/cancel.js b/packages/ipfs-core/src/components/name/pubsub/cancel.js index dd6a43a003..1eb1b50985 100644 --- a/packages/ipfs-core/src/components/name/pubsub/cancel.js +++ b/packages/ipfs-core/src/components/name/pubsub/cancel.js @@ -3,16 +3,34 @@ const { getPubsubRouting } = require('./utils') const { withTimeoutOption } = require('../../../utils') +/** + * @param {Object} config + * @param {import('../../../ipns')} config.ipns + * @param {import('../../init').ConstructorOptions} config.options + */ module.exports = ({ ipns, options: constructorOptions }) => { /** * Cancel a name subscription. * - * @param {string} name - subscription name. - * @param {object} [options] + * @param {string} name - The name of the subscription to cancel. 
+ * @param {AbortOptions} [options] * @returns {Promise<{ canceled: boolean }>} + * @example + * ```js + * const name = 'QmQrX8hka2BtNHa8N8arAq16TCVx5qHcb46c5yPewRycLm' + * const result = await ipfs.name.pubsub.cancel(name) + * console.log(result.canceled) + * // Logs: true + * ``` */ - return withTimeoutOption(async function cancel (name, options) { // eslint-disable-line require-await + async function cancel (name, options) { // eslint-disable-line require-await const pubsub = getPubsubRouting(ipns, constructorOptions) return pubsub.cancel(name, options) - }) + } + + return withTimeoutOption(cancel) } + +/** + * @typedef {import('../../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/name/pubsub/state.js b/packages/ipfs-core/src/components/name/pubsub/state.js index 0fb690d55d..f122da1396 100644 --- a/packages/ipfs-core/src/components/name/pubsub/state.js +++ b/packages/ipfs-core/src/components/name/pubsub/state.js @@ -7,13 +7,25 @@ module.exports = ({ ipns, options: constructorOptions }) => { /** * Query the state of IPNS pubsub. * - * @returns {Promise} + * @param {AbortOptions} [_options] + * @returns {Promise<{ enabled: boolean }>} + * ```js + * const result = await ipfs.name.pubsub.state() + * console.log(result.enabled) + * // Logs: true + * ``` */ - return withTimeoutOption(async function state (options) { // eslint-disable-line require-await + async function state (_options) { // eslint-disable-line require-await try { return { enabled: Boolean(getPubsubRouting(ipns, constructorOptions)) } } catch (err) { - return false + return { enabled: false } } - }) + } + + return withTimeoutOption(state) } + +/** + * @typedef {import('../../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/name/pubsub/subs.js b/packages/ipfs-core/src/components/name/pubsub/subs.js index a581c32bcf..6e84f8ec3c 100644 --- a/packages/ipfs-core/src/components/name/pubsub/subs.js +++ b/packages/ipfs-core/src/components/name/pubsub/subs.js @@ -7,11 +7,23 @@ module.exports = ({ ipns, options: constructorOptions }) => { /** * Show current name subscriptions. 
* - * @param {function(Error)} [callback] + * @param {AbortOptions} [options] * @returns {Promise} + * @example + * ```js + * const result = await ipfs.name.pubsub.subs() + * console.log(result) + * // Logs: ['/ipns/QmQrX8hka2BtNHa8N8arAq16TCVx5qHcb46c5yPewRycLm'] + * ``` */ - return withTimeoutOption(async function subs (options) { // eslint-disable-line require-await + async function subs (options) { // eslint-disable-line require-await const pubsub = getPubsubRouting(ipns, constructorOptions) return pubsub.getSubscriptions(options) - }) + } + + return withTimeoutOption(subs) } + +/** + * @typedef {import('../../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/name/resolve.js b/packages/ipfs-core/src/components/name/resolve.js index c038acb8c2..ad3eb704f5 100644 --- a/packages/ipfs-core/src/components/name/resolve.js +++ b/packages/ipfs-core/src/components/name/resolve.js @@ -2,51 +2,61 @@ const debug = require('debug') const errcode = require('err-code') +/** @type {typeof Object.assign} */ const mergeOptions = require('merge-options') const CID = require('cids') const isDomain = require('is-domain-name') -const log = debug('ipfs:name:resolve') -log.error = debug('ipfs:name:resolve:error') +const log = Object.assign(debug('ipfs:name:resolve'), { + error: debug('ipfs:name:resolve:error') +}) const { OFFLINE_ERROR, withTimeoutOption } = require('../../utils') -const appendRemainder = async (result, remainder) => { - result = await result - - if (remainder.length) { - return result + '/' + remainder.join('/') - } - - return result -} - /** - * @typedef { import("../index") } IPFS + * + * @param {string} result + * @param {string[]} remainder + * @returns {string} */ +const appendRemainder = (result, remainder) => + remainder.length > 0 + ? result + '/' + remainder.join('/') + : result /** * IPNS - Inter-Planetary Naming System * - * @param {IPFS} self - * @returns {Object} + * @param {Object} config + * @param {import('../index').DNS} config.dns + * @param {import('../../ipns')} config.ipns + * @param {import('peer-id')} config.peerId + * @param {import('../index').IsOnline} config.isOnline + * @param {{offline?:boolean}} config.options */ module.exports = ({ dns, ipns, peerId, isOnline, options: constructorOptions }) => { /** * Given a key, query the DHT for its best value. * * @param {string} name - ipns name to resolve. Defaults to your node's peerID. - * @param {Object} options - ipfs resolve options. - * @param {boolean} options.nocache - do not use cached entries. - * @param {boolean} options.recursive - resolve until the result is not an IPNS name. - * @param {function(Error)} [callback] - * @returns {Promise|void} + * @param {ResolveOptions} [options] + * @returns {AsyncIterable} + * @example + * ```js + * // The IPNS address you want to resolve. + * const addr = '/ipns/ipfs.io' + * + * for await (const name of ipfs.name.resolve(addr)) { + * console.log(name) + * } + * // Logs: /ipfs/QmQrX8hka2BtNHa8N8arAq16TCVx5qHcb46c5yPewRycLm + * ``` */ - return withTimeoutOption(async function * resolve (name, options) { // eslint-disable-line require-await + async function * resolve (name, options = {}) { // eslint-disable-line require-await options = mergeOptions({ nocache: false, recursive: true - }, options || {}) + }, options) const { offline } = constructorOptions @@ -70,7 +80,7 @@ module.exports = ({ dns, ipns, peerId, isOnline, options: constructorOptions }) } catch (err) { // lets check if we have a domain ex. 
/ipns/ipfs.io and resolve with dns if (isDomain(hash)) { - yield appendRemainder(dns(hash, options), remainder) + yield appendRemainder(await dns(hash, options), remainder) return } @@ -85,6 +95,20 @@ module.exports = ({ dns, ipns, peerId, isOnline, options: constructorOptions }) } // TODO: convert ipns.resolve to return an iterator - yield appendRemainder(ipns.resolve(`/${namespace}/${hash}`, options), remainder) - }) + yield appendRemainder(await ipns.resolve(`/${namespace}/${hash}`, options), remainder) + } + + return withTimeoutOption(resolve) } + +/** + * IPFS resolve options. + * + * @typedef {ResolveSettings & AbortOptions} ResolveOptions + * + * @typedef {Object} ResolveSettings + * @property {boolean} [options.nocache=false] - do not use cached entries. + * @property {boolean} [options.recursive=true] - resolve until the result is not an IPNS name. + * + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/object/get.js b/packages/ipfs-core/src/components/object/get.js index 8d96a18e24..70b8d6785c 100644 --- a/packages/ipfs-core/src/components/object/get.js +++ b/packages/ipfs-core/src/components/object/get.js @@ -20,9 +20,7 @@ function normalizeMultihash (multihash, enc) { } module.exports = ({ ipld, preload }) => { - return withTimeoutOption(async function get (multihash, options) { // eslint-disable-line require-await - options = options || {} - + return withTimeoutOption(async function get (multihash, options = {}) { // eslint-disable-line require-await let mh, cid try { diff --git a/packages/ipfs-core/src/components/object/links.js b/packages/ipfs-core/src/components/object/links.js index 19c02532ba..8d9afb1929 100644 --- a/packages/ipfs-core/src/components/object/links.js +++ b/packages/ipfs-core/src/components/object/links.js @@ -36,9 +36,7 @@ function findLinks (node, links = []) { } module.exports = ({ dag }) => { - return withTimeoutOption(async function links (multihash, options) { - options = options || {} - + return withTimeoutOption(async function links (multihash, options = {}) { const cid = new CID(multihash) const result = await dag.get(cid, options) diff --git a/packages/ipfs-core/src/components/object/put.js b/packages/ipfs-core/src/components/object/put.js index 48b901b13a..cbbe4f2e23 100644 --- a/packages/ipfs-core/src/components/object/put.js +++ b/packages/ipfs-core/src/components/object/put.js @@ -47,9 +47,7 @@ function parseProtoBuffer (buf) { } module.exports = ({ ipld, gcLock, preload }) => { - return withTimeoutOption(async function put (obj, options) { - options = options || {} - + return withTimeoutOption(async function put (obj, options = {}) { const encoding = options.enc let node diff --git a/packages/ipfs-core/src/components/object/stat.js b/packages/ipfs-core/src/components/object/stat.js index a9aed2b593..511415c4c7 100644 --- a/packages/ipfs-core/src/components/object/stat.js +++ b/packages/ipfs-core/src/components/object/stat.js @@ -5,9 +5,7 @@ const { withTimeoutOption } = require('../../utils') module.exports = ({ ipld, preload }) => { const get = require('./get')({ ipld, preload }) - return withTimeoutOption(async function stat (multihash, options) { - options = options || {} - + return withTimeoutOption(async function stat (multihash, options = {}) { const node = await get(multihash, options) const serialized = dagPB.util.serialize(node) const cid = await dagPB.util.cid(serialized, { diff --git a/packages/ipfs-core/src/components/pin/add-all.js 
b/packages/ipfs-core/src/components/pin/add-all.js index 4a5559022b..24889323e3 100644 --- a/packages/ipfs-core/src/components/pin/add-all.js +++ b/packages/ipfs-core/src/components/pin/add-all.js @@ -4,12 +4,39 @@ const { resolvePath, withTimeoutOption } = require('../../utils') const PinManager = require('./pin-manager') const { PinTypes } = PinManager + +/** @type {(source:Source) => AsyncIterable} */ const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') +/** + * + * @param {Object} config + * @param {import('..').GCLock} config.gcLock + * @param {import('..').DAG} config.dag + * @param {import('./pin-manager')} config.pinManager + */ module.exports = ({ pinManager, gcLock, dag }) => { - return withTimeoutOption(async function * addAll (source, options) { - options = options || {} - + /** + * Adds multiple IPFS objects to the pinset and also stores it to the IPFS + * repo. pinset is the set of hashes currently pinned (not gc'able) + * + * @param {Source} source - One or more CIDs or IPFS Paths to pin in your repo + * @param {AddOptions} [options] + * @returns {AsyncIterable} - CIDs that were pinned. + * @example + * ```js + * const cid = CID.from('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') + * for await (const cid of ipfs.pin.addAll([cid])) { + * console.log(cid) + * } + * // Logs: + * // CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') + * ``` + */ + async function * addAll (source, options = {}) { + /** + * @returns {AsyncIterable} + */ const pinAdd = async function * () { for await (const { path, recursive, metadata } of normaliseInput(source)) { const cid = await resolvePath(dag, path) @@ -48,5 +75,26 @@ module.exports = ({ pinManager, gcLock, dag }) => { } finally { release() } - }) + } + + return withTimeoutOption(addAll) } + +/** + * @typedef {import('ipfs-core-utils/src/pins/normalise-input').Source} Source + * @typedef {import('ipfs-core-utils/src/pins/normalise-input').Pin} PinTarget + * + * @typedef {AddSettings & AbortOptions} AddOptions + * + * @typedef {Object} AddSettings + * @property {boolean} [lock] + * + * @typedef {import('../../utils').AbortOptions} AbortOptions + * + * @typedef {import('..').CID} CID + */ + +/** + * @template T + * @typedef {Iterable|AsyncIterable} AwaitIterable + */ diff --git a/packages/ipfs-core/src/components/pin/add.js b/packages/ipfs-core/src/components/pin/add.js index 4e5ee88dd4..a51e9d3807 100644 --- a/packages/ipfs-core/src/components/pin/add.js +++ b/packages/ipfs-core/src/components/pin/add.js @@ -2,11 +2,25 @@ const last = require('it-last') -module.exports = ({ addAll }) => { - return async function add (path, options) { // eslint-disable-line require-await - return last(addAll({ - path, - ...options - }, options)) - } -} +/** + * @param {Object} config + * @param {ReturnType} config.addAll + */ +module.exports = ({ addAll }) => + /** + * @param {CID|string} path + * @param {AddOptions & AbortOptions} [options] + * @returns {Promise} + */ + async (path, options = {}) => + /** @type {CID} - Need to loosen check here because it could be void */ + (await last(addAll({ path, ...options }, options))) + +/** + * @typedef {Object} AddOptions + * @property {boolean} [lock] + * @property {boolean} [recursive] - Recursively pin all links contained by the object + * + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('..').CID} CID + */ diff --git a/packages/ipfs-core/src/components/pin/ls.js b/packages/ipfs-core/src/components/pin/ls.js index 229175c72a..179585d620 100644 --- 
a/packages/ipfs-core/src/components/pin/ls.js +++ b/packages/ipfs-core/src/components/pin/ls.js @@ -19,13 +19,47 @@ function toPin (type, cid, metadata) { return output } +/** + * @param {Object} config + * @param {import('./pin-manager')} config.pinManager + * @param {import('../index').DAG} config.dag + */ module.exports = ({ pinManager, dag }) => { - return withTimeoutOption(async function * ls (options = {}) { + /** + * List all the objects pinned to local storage + * + * @param {LsOptions} [options] + * @returns {AsyncIterable} + * @example + * ```js + * for await (const { cid, type } of ipfs.pin.ls()) { + * console.log({ cid, type }) + * } + * // { cid: CID(Qmc5XkteJdb337s7VwFBAGtiaoj2QCEzyxtNRy3iMudc3E), type: 'recursive' } + * // { cid: CID(QmZbj5ruYneZb8FuR9wnLqJCpCXMQudhSdWhdhp5U1oPWJ), type: 'indirect' } + * // { cid: CID(QmSo73bmN47gBxMNqbdV6rZ4KJiqaArqJ1nu5TvFhqqj1R), type: 'indirect' } + * + * const paths = [ + * CID.from('Qmc5..'), + * CID.from('QmZb..'), + * CID.from('QmSo..') + * ] + * for await (const { cid, type } of ipfs.pin.ls({ paths })) { + * console.log({ cid, type }) + * } + * // { cid: CID(Qmc5XkteJdb337s7VwFBAGtiaoj2QCEzyxtNRy3iMudc3E), type: 'recursive' } + * // { cid: CID(QmZbj5ruYneZb8FuR9wnLqJCpCXMQudhSdWhdhp5U1oPWJ), type: 'indirect' } + * // { cid: CID(QmSo73bmN47gBxMNqbdV6rZ4KJiqaArqJ1nu5TvFhqqj1R), type: 'indirect' } + * ``` + */ + async function * ls (options = {}) { + /** @type {PinQueryType} */ let type = PinTypes.all if (options.type) { type = options.type if (typeof options.type === 'string') { + // @ts-ignore - Can't infer that string returned by toLowerCase() is PinQueryType type = options.type.toLowerCase() } @@ -72,6 +106,8 @@ module.exports = ({ pinManager, dag }) => { } if (type === PinTypes.indirect || type === PinTypes.all) { + // @ts-ignore - LsSettings & AbortOptions have no properties in common + // with type { preload?: boolean } for await (const cid of pinManager.indirectKeys(options)) { yield toPin(PinTypes.indirect, cid) } @@ -82,5 +118,25 @@ module.exports = ({ pinManager, dag }) => { yield toPin(PinTypes.direct, cid, metadata) } } - }) + } + + return withTimeoutOption(ls) } + +/** + * @typedef {LsSettings & AbortOptions} LsOptions + * + * @typedef {Object} LsSettings + * @property {string[]|CID[]} [paths] - CIDs or IPFS paths to search for in the pinset. 
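To complement the `@example` above, a small sketch of the `type` filter described by `LsSettings`; it assumes an initialised `ipfs` instance.

```js
async function listPins (ipfs, type = 'recursive') {
  const entries = []

  // type may be 'direct', 'recursive', 'indirect' or 'all' (PinQueryType)
  for await (const { cid, type: pinType } of ipfs.pin.ls({ type })) {
    entries.push({ cid: cid.toString(), type: pinType })
  }

  return entries
}
```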
+ * @property {PinQueryType} [type] - Filter by this type of pin ("recursive", "direct" or "indirect") + * + * @typedef {Object} LsEntry + * @property {CID} cid - CID of the pinned node + * @property {PinType} type - Pin type ("recursive", "direct" or "indirect") + * + * @typedef {import('./pin-manager').PinType} PinType + * @typedef {import('./pin-manager').PinQueryType} PinQueryType + * + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('..').CID} CID + */ diff --git a/packages/ipfs-core/src/components/pin/pin-manager.js b/packages/ipfs-core/src/components/pin/pin-manager.js index 84112e63ab..9c99ee5d15 100644 --- a/packages/ipfs-core/src/components/pin/pin-manager.js +++ b/packages/ipfs-core/src/components/pin/pin-manager.js @@ -32,11 +32,20 @@ function keyToMultihash (key) { return encoder.decode(key.toString().slice(1)) } +/** + * @typedef {'direct'|'recursive'|'indirect'} PinType + * @typedef {PinType|'all'} PinQueryType + */ + const PinTypes = { - direct: 'direct', - recursive: 'recursive', - indirect: 'indirect', - all: 'all' + /** @type {'direct'} */ + direct: ('direct'), + /** @type {'recursive'} */ + recursive: ('recursive'), + /** @type {'indirect'} */ + indirect: ('indirect'), + /** @type {'all'} */ + all: ('all') } class PinManager { @@ -57,7 +66,7 @@ class PinManager { yield * this._walkDag(link.Hash, { preload }) } } else if (cid.codec === 'dag-cbor') { - for (const [_, childCid] of dagCborLinks(node)) { // eslint-disable-line no-unused-vars + for (const [, childCid] of dagCborLinks(node)) { yield childCid yield * this._walkDag(childCid, { preload }) } @@ -152,6 +161,10 @@ class PinManager { } } + /** + * @param {Object} options + * @param {boolean} [options.preload] + */ async * indirectKeys ({ preload }) { for await (const { cid } of this.recursiveKeys()) { for await (const childCid of this._walkDag(cid, { preload })) { diff --git a/packages/ipfs-core/src/components/pin/rm-all.js b/packages/ipfs-core/src/components/pin/rm-all.js index 39298fbd7e..06f0887fe9 100644 --- a/packages/ipfs-core/src/components/pin/rm-all.js +++ b/packages/ipfs-core/src/components/pin/rm-all.js @@ -4,13 +4,37 @@ const normaliseInput = require('ipfs-core-utils/src/pins/normalise-input') const { resolvePath, withTimeoutOption } = require('../../utils') const { PinTypes } = require('./pin-manager') +/** + * @param {Object} config + * @param {import('./pin-manager')} config.pinManager + * @param {import('..').GCLock} config.gcLock + * @param {import('..').DAG} config.dag + */ module.exports = ({ pinManager, gcLock, dag }) => { - return withTimeoutOption(async function * rm (paths, options = {}) { + /** + * Unpin one or more blocks from your repo + * + * @param {Source} source - Unpin all pins from the source + * @param {AbortOptions} [_options] + * @returns {AsyncIterable} + * @example + * ```js + * const source = [ + * CID.from('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') + * ] + * for await (const cid of ipfs.pin.rmAll(source)) { + * console.log(cid) + * } + * // prints the CIDs that were unpinned + * // CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') + * ``` + */ + async function * rmAll (source, _options = {}) { const release = await gcLock.readLock() try { // verify that each hash can be unpinned - for await (const { path, recursive } of normaliseInput(paths)) { + for await (const { path, recursive } of normaliseInput(source)) { const cid = await resolvePath(dag, path) const { pinned, reason } = await pinManager.isPinnedWithType(cid, PinTypes.all) @@ 
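The `PinTypes` change above is about type inference rather than behaviour. A condensed sketch of the same JSDoc literal-narrowing trick, with a consumer that only type-checks because each property is narrowed to its string literal:

```js
/**
 * @typedef {'direct'|'recursive'|'indirect'} PinType
 * @typedef {PinType|'all'} PinQueryType
 */

const PinTypes = {
  /** @type {'recursive'} */
  recursive: ('recursive'),
  /** @type {'all'} */
  all: ('all')
}

/**
 * @param {PinQueryType} type
 */
function describe (type) {
  return `listing ${type} pins`
}

// Without the casts above, `PinTypes.all` would be inferred as `string`
// and these calls would not satisfy the PinQueryType union under checkJs.
console.log(describe(PinTypes.all), describe(PinTypes.recursive))
```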
-42,5 +66,13 @@ module.exports = ({ pinManager, gcLock, dag }) => { } finally { release() } - }) + } + + return withTimeoutOption(rmAll) } + +/** + * @typedef {import('..').CID} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + * @typedef {import('./add-all').Source} Source + */ diff --git a/packages/ipfs-core/src/components/pin/rm.js b/packages/ipfs-core/src/components/pin/rm.js index 12a5f3e63d..92bd4a8961 100644 --- a/packages/ipfs-core/src/components/pin/rm.js +++ b/packages/ipfs-core/src/components/pin/rm.js @@ -2,11 +2,36 @@ const last = require('it-last') -module.exports = ({ rmAll }) => { - return async function rm (path, options) { // eslint-disable-line require-await - return last(rmAll({ - path, - ...options - }, options)) - } -} +/** + * @param {Object} config + * @param {ReturnType} config.rmAll + */ +module.exports = ({ rmAll }) => + /** + * Unpin this block from your repo + * + * @param {string|CID} path - CID or IPFS Path to unpin. + * @param {RmOptions} [options] + * @returns {Promise} - The CIDs that was unpinned + * @example + * ```js + * const cid = CID.from('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') + * const result = await ipfs.pin.rm(cid) + * console.log(result) + * // prints the CID that was unpinned + * // CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') + * ``` + */ + async (path, options) => + /** @type {CID} - Need to loosen check here because it could be void */ + (await last(rmAll({ path, ...options }, options))) + +/** + * @typedef {RmSettings & AbortOptions} RmOptions + * + * @typedef {Object} RmSettings + * @property {boolean} [recursive=true] - Recursively unpin the object linked + * + * @typedef {import('..').CID} CID + * @typedef {import('../../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/ping.js b/packages/ipfs-core/src/components/ping.js index 97cdc81072..24ed95dbff 100644 --- a/packages/ipfs-core/src/components/ping.js +++ b/packages/ipfs-core/src/components/ping.js @@ -1,12 +1,33 @@ 'use strict' const PeerId = require('peer-id') +/** @type {{success:true, time:0, text: ''}} */ const basePacket = { success: true, time: 0, text: '' } const { withTimeoutOption } = require('../utils') +/** + * @param {Object} config + * @param {import('libp2p')} config.libp2p + */ module.exports = ({ libp2p }) => { - return withTimeoutOption(async function * ping (peerId, options) { - options = options || {} + /** + * Send echo request packets to IPFS hosts. + * + * @param {PeerId} peerId - The remote peer to send packets to + * @param {PingOptions} [options] + * @returns {AsyncIterable} + * @example + * ```js + * for await (const res of ipfs.ping('Qmhash')) { + * if (res.time) { + * console.log(`Pong received: time=${res.time} ms`) + * } else { + * console.log(res.text) + * } + * } + * ``` + */ + async function * ping (peerId, options = {}) { options.count = options.count || 10 if (!PeerId.isPeerId(peerId)) { @@ -40,5 +61,36 @@ module.exports = ({ libp2p }) => { const average = totalTime / packetCount yield { ...basePacket, text: `Average latency: ${average}ms` } } - }) + } + + return withTimeoutOption(ping) } + +/** + * @typedef {Pong|PingFailure|StatusUpdate} Packet + * Note that not all ping response objects are "pongs". + * A "pong" message can be identified by a truthy success property and an empty + * text property. Other ping responses are failures or status updates. 
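For reference, the shape of the `pin.rm`-from-`rmAll` derivation above, reduced to a standalone sketch: the singular call just drains the async iterable with `it-last` and returns the final value. The inline `rmAll` here is a hypothetical stand-in, not the real implementation.

```js
const last = require('it-last')

const makeRm = ({ rmAll }) =>
  async (path, options = {}) => last(rmAll({ path, ...options }, options))

// Hypothetical rmAll that simply echoes back the path it "unpinned"
const rm = makeRm({
  rmAll: async function * (source) {
    yield source.path
  }
})

rm('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u')
  .then(cid => console.log('unpinned', cid))
```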
+ * + * @typedef {Object} Pong + * @property {true} success + * @property {number} time + * @property {''} text + * + * @typedef {Object} PingFailure + * @property {false} success + * @property {number} time + * @property {string} text + * + * @typedef {Object} StatusUpdate + * @property {true} success + * @property {0} time + * @property {string} text + * + * @typedef {PingSettings & AbortOptions} PingOptions + * + * @typedef {Object} PingSettings + * @property {number} [count=10] - The number of ping messages to send + * + * @typedef {import('../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/pubsub.js b/packages/ipfs-core/src/components/pubsub.js index 71b43d4370..531a85e359 100644 --- a/packages/ipfs-core/src/components/pubsub.js +++ b/packages/ipfs-core/src/components/pubsub.js @@ -7,7 +7,7 @@ module.exports = ({ libp2p }) => { return { subscribe: withTimeoutOption((...args) => libp2p.pubsub.subscribe(...args)), unsubscribe: withTimeoutOption((...args) => libp2p.pubsub.unsubscribe(...args)), - publish: withTimeoutOption(async (topic, data, options) => { + publish: withTimeoutOption(async (topic, data, _options) => { if (!data) { throw errCode(new Error('argument "data" is required'), 'ERR_ARG_REQUIRED') } diff --git a/packages/ipfs-core/src/components/refs/index.js b/packages/ipfs-core/src/components/refs/index.js index 0a574ff327..669fd0cbe1 100644 --- a/packages/ipfs-core/src/components/refs/index.js +++ b/packages/ipfs-core/src/components/refs/index.js @@ -14,9 +14,7 @@ const Format = { } module.exports = function ({ ipld, resolve, preload }) { - return withTimeoutOption(async function * refs (ipfsPath, options) { // eslint-disable-line require-await - options = options || {} - + return withTimeoutOption(async function * refs (ipfsPath, options = {}) { // eslint-disable-line require-await if (options.maxDepth === 0) { return } diff --git a/packages/ipfs-core/src/components/repo/gc.js b/packages/ipfs-core/src/components/repo/gc.js index b3c0864a0d..c2b8b26afa 100644 --- a/packages/ipfs-core/src/components/repo/gc.js +++ b/packages/ipfs-core/src/components/repo/gc.js @@ -11,9 +11,21 @@ const multibase = require('multibase') // Limit on the number of parallel block remove operations const BLOCK_RM_CONCURRENCY = 256 -// Perform mark and sweep garbage collection +/** + * Perform mark and sweep garbage collection + * + * @param {Object} config + * @param {import('..').GCLock} config.gcLock + * @param {import('..').Pin} config.pin + * @param {import('..').Refs} config.refs + * @param {import('..').IPFSRepo} config.repo + */ module.exports = ({ gcLock, pin, refs, repo }) => { - return withTimeoutOption(async function * gc (options = {}) { + /** + * @param {AbortOptions} [_options] + * @returns {AsyncIterable} + */ + async function * gc (_options = {}) { const start = Date.now() log('Creating set of marked blocks') @@ -26,13 +38,15 @@ module.exports = ({ gcLock, pin, refs, repo }) => { const blockKeys = repo.blocks.query({ keysOnly: true }) // Delete blocks that are not being used - yield * deleteUnmarkedBlocks({ repo, refs }, markedSet, blockKeys) + yield * deleteUnmarkedBlocks({ repo }, markedSet, blockKeys) log(`Complete (${Date.now() - start}ms)`) } finally { release() } - }) + } + + return withTimeoutOption(gc) } // Get Set of CIDs of blocks to keep @@ -67,7 +81,7 @@ async function createMarkedSet ({ pin, refs, repo }) { } // Delete all blocks that are not marked as in use -async function * deleteUnmarkedBlocks ({ repo, refs }, markedSet, 
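A consumption sketch for the `Packet` union documented above: pongs carry a time and an empty text, failures have `success: false`, and everything else is a status update. Assumes an initialised `ipfs` instance.

```js
async function pingPeer (ipfs, peerId) {
  const times = []

  for await (const packet of ipfs.ping(peerId, { count: 5 })) {
    if (!packet.success) {
      console.error('ping failed:', packet.text) // PingFailure
    } else if (packet.text === '') {
      times.push(packet.time)                    // Pong
    } else {
      console.log(packet.text)                   // StatusUpdate
    }
  }

  return times
}
```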
blockKeys) { +async function * deleteUnmarkedBlocks ({ repo }, markedSet, blockKeys) { // Iterate through all blocks and find those that are not in the marked set // blockKeys yields { key: Key() } let blocksCount = 0 @@ -104,3 +118,19 @@ async function * deleteUnmarkedBlocks ({ repo, refs }, markedSet, blockKeys) { log(`Marked set has ${markedSet.size} unique blocks. Blockstore has ${blocksCount} blocks. ` + `Deleted ${removedBlocksCount} blocks.`) } + +/** + * @typedef {import('../../utils').AbortOptions} AbortOptions + * + * @typedef {Err|BlockID} Notification + * + * @typedef {Object} Err + * @property {void} [cid] + * @property {Error} err + * + * @typedef {Object} BlockID + * @property {CID} cid + * @property {void} [err] + * + * @typedef {import('interface-datastore').Key} Key + */ diff --git a/packages/ipfs-core/src/components/resolve.js b/packages/ipfs-core/src/components/resolve.js index 8a89c434f9..9b14d4516c 100644 --- a/packages/ipfs-core/src/components/resolve.js +++ b/packages/ipfs-core/src/components/resolve.js @@ -6,35 +6,54 @@ const { cidToString } = require('ipfs-core-utils/src/cid') const { withTimeoutOption } = require('../utils') /** - * @typedef {object} ResolveOptions - * @property {string} [cidBase='base58btc'] - Multibase codec name the CID in the resolved path will be encoded with - * @property {boolean} [recursive=true] - Resolve until the result is an IPFS name - */ - -/** - * Resolve the value of names to IPFS - * - * There are a number of mutable name protocols that can link among themselves and into IPNS. For example IPNS references can (currently) point at an IPFS object, and DNS links can point at other DNS links, IPNS entries, or IPFS objects. This command accepts any of these identifiers and resolves them to the referenced item. - * - * @template {Record} ExtraOptions - * @callback Resolve - * @param {string} path - The name to resolve - * @param {ResolveOptions & import('../utils').AbortOptions & ExtraOptions} [options] - * @returns {Promise} - A string representing the resolved name - */ - -/** - * IPFS Resolve factory - * - * @param {object} config - * @param {IPLD} config.ipld - An instance of IPLD - * @param {NameApi} [config.name] - An IPFS core interface name API - * @returns {Resolve<{}>} + * @param {Object} config + * @param {import('.').IPLD} config.ipld - An instance of IPLD + * @param {import('.').Name} [config.name] - An IPFS core interface name API */ module.exports = ({ ipld, name }) => { - return withTimeoutOption(async function resolve (path, opts) { - opts = opts || {} - + /** + * Resolve the value of names to IPFS + * + * There are a number of mutable name protocols that can link among themselves + * and into IPNS. For example IPNS references can (currently) point at an IPFS + * object, and DNS links can point at other DNS links, IPNS entries, or IPFS + * objects. This command accepts any of these identifiers and resolves them + * to the referenced item. 
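A small sketch of consuming `repo.gc()` with the `Notification` union defined above, where each yielded value carries either the removed `cid` or an `err`; assumes an initialised `ipfs` instance.

```js
async function collectGarbage (ipfs) {
  let removed = 0

  for await (const res of ipfs.repo.gc()) {
    if (res.err) {
      console.error('could not remove block:', res.err.message)
    } else {
      removed++
    }
  }

  console.log(`removed ${removed} unreferenced blocks`)
}
```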
+ * + * @param {string} path - The name to resolve + * @param {ResolveOptions} [opts] + * @returns {Promise} - A string representing the resolved name + * @example + * ```js + * // Resolve the value of your identity: + * const name = '/ipns/QmatmE9msSfkKxoffpHwNLNKgwZG8eT9Bud6YoPab52vpy' + * + * const res = await ipfs.resolve(name) + * console.log(res) + * // Logs: /ipfs/Qmcqtw8FfrVSBaRmbWwHxt3AuySBhJLcvmFYi3Lbc4xnwj + * + * // Resolve the value of another name recursively: + * const name = '/ipns/QmbCMUZw6JFeZ7Wp9jkzbye3Fzp2GGcPgC3nmeUjfVF87n' + * + * // Where: + * // /ipns/QmbCMUZw6JFeZ7Wp9jkzbye3Fzp2GGcPgC3nmeUjfVF87n + * // ...resolves to: + * // /ipns/QmatmE9msSfkKxoffpHwNLNKgwZG8eT9Bud6YoPab52vpy + * // ...which in turn resolves to: + * // /ipfs/Qmcqtw8FfrVSBaRmbWwHxt3AuySBhJLcvmFYi3Lbc4xnwj + * + * const res = await ipfs.resolve(name, { recursive: true }) + * console.log(res) + * // Logs: /ipfs/Qmcqtw8FfrVSBaRmbWwHxt3AuySBhJLcvmFYi3Lbc4xnwj + * + * // Resolve the value of an IPFS path: + * const name = '/ipfs/QmeZy1fGbwgVSrqbfh9fKQrAWgeyRnj7h8fsHS1oy3k99x/beep/boop' + * const res = await ipfs.resolve(name) + * console.log(res) + * // Logs: /ipfs/QmYRMjyvAiHKN9UTi8Bzt1HUspmSRD8T8DwxfSMzLgBon1 + * ``` + */ + async function resolve (path, opts = {}) { if (!isIpfs.path(path)) { throw new Error('invalid argument ' + path) } @@ -71,5 +90,17 @@ module.exports = ({ ipld, name }) => { } return `/ipfs/${cidToString(value, { base: opts.cidBase })}${remainderPath ? '/' + remainderPath : ''}` - }) + } + + return withTimeoutOption(resolve) } + +/** + * @typedef {ResolveSettings & AbortOptions} ResolveOptions + * + * @typedef {Object} ResolveSettings + * @property {boolean} [recursive=true] - Resolve until result is an IPFS name. + * @property {string} [cidBase='base58btc'] - Multibase codec name the CID in the resolved path will be encoded with. 
+ * + * @typedef {import('../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/start.js b/packages/ipfs-core/src/components/start.js index 8bff2aba7d..ec72580c83 100644 --- a/packages/ipfs-core/src/components/start.js +++ b/packages/ipfs-core/src/components/start.js @@ -15,6 +15,21 @@ const { withTimeoutOption } = require('../utils') const WEBSOCKET_STAR_PROTO_CODE = 479 +/** + * @param {Object} config + * @param {APIManager} config.apiManager + * @param {StartOptions} config.options + * @param {IPFSBlockService} config.blockService + * @param {GCLock} config.gcLock + * @param {InitOptions} config.initOptions + * @param {IPLD} config.ipld + * @param {Keychain} config.keychain + * @param {PeerId} config.peerId + * @param {PinManager} config.pinManager + * @param {Preload} config.preload + * @param {Print} config.print + * @param {IPFSRepo} config.repo + */ module.exports = ({ apiManager, options: constructorOptions, @@ -28,142 +43,142 @@ module.exports = ({ preload, print, repo -}) => withTimeoutOption(async function start () { - const startPromise = defer() - startPromise.promise.catch((err) => log(err)) +}) => { + async function start () { + const startPromise = defer() + startPromise.promise.catch((err) => log(err)) - const { cancel } = apiManager.update({ start: () => startPromise.promise }) + const { cancel } = apiManager.update({ start: () => startPromise.promise }) - try { + try { // The repo may be closed if previously stopped - if (repo.closed) { - await repo.open() - } - - const config = await repo.config.getAll() - const addrs = [] - - if (config.Addresses && config.Addresses.Swarm) { - config.Addresses.Swarm.forEach(addr => { - let ma = multiaddr(addr) - - // Temporary error for users migrating using websocket-star multiaddrs for listenning on libp2p - // websocket-star support was removed from ipfs and libp2p - if (ma.protoCodes().includes(WEBSOCKET_STAR_PROTO_CODE)) { - throw errCode(new Error('websocket-star swarm addresses are not supported. See https://github.com/ipfs/js-ipfs/issues/2779'), 'ERR_WEBSOCKET_STAR_SWARM_ADDR_NOT_SUPPORTED') - } - - // multiaddrs that go via a signalling server or other intermediary (e.g. stardust, - // webrtc-star) can have the intermediary's peer ID in the address, so append our - // peer ID to the end of it - const maId = ma.getPeerId() - if (maId && maId !== peerId.toB58String()) { - ma = ma.encapsulate(`/p2p/${peerId.toB58String()}`) - } - - addrs.push(ma) + if (repo.closed) { + await repo.open() + } + + const config = await repo.config.getAll() + const addrs = [] + + if (config.Addresses && config.Addresses.Swarm) { + config.Addresses.Swarm.forEach(addr => { + let ma = multiaddr(addr) + + // Temporary error for users migrating using websocket-star multiaddrs for listenning on libp2p + // websocket-star support was removed from ipfs and libp2p + if (ma.protoCodes().includes(WEBSOCKET_STAR_PROTO_CODE)) { + throw errCode(new Error('websocket-star swarm addresses are not supported. See https://github.com/ipfs/js-ipfs/issues/2779'), 'ERR_WEBSOCKET_STAR_SWARM_ADDR_NOT_SUPPORTED') + } + + // multiaddrs that go via a signalling server or other intermediary (e.g. 
stardust, + // webrtc-star) can have the intermediary's peer ID in the address, so append our + // peer ID to the end of it + const maId = ma.getPeerId() + if (maId && maId !== peerId.toB58String()) { + ma = ma.encapsulate(`/p2p/${peerId.toB58String()}`) + } + + addrs.push(ma) + }) + } + + const libp2p = Components.libp2p({ + options: constructorOptions, + repo, + peerId: peerId, + multiaddrs: addrs, + config }) - } - const libp2p = Components.libp2p({ - options: constructorOptions, - repo, - peerId: peerId, - multiaddrs: addrs, - config - }) - - libp2p.keychain && await libp2p.loadKeychain() - - await libp2p.start() - - libp2p.transportManager.getAddrs().forEach(ma => print(`Swarm listening on ${ma}/p2p/${peerId.toB58String()}`)) - - const ipnsRouting = routingConfig({ libp2p, repo, peerId, options: constructorOptions }) - const ipns = new IPNS(ipnsRouting, repo.datastore, peerId, keychain, { pass: initOptions.pass }) - const bitswap = new Bitswap(libp2p, repo.blocks, { statsEnabled: true }) + libp2p.keychain && await libp2p.loadKeychain() - await bitswap.start() + await libp2p.start() - blockService.setExchange(bitswap) + libp2p.transportManager.getAddrs().forEach(ma => print(`Swarm listening on ${ma}/p2p/${peerId.toB58String()}`)) - const dag = { - get: Components.dag.get({ ipld, preload }), - resolve: Components.dag.resolve({ ipld, preload }), - tree: Components.dag.tree({ ipld, preload }) - } + const ipnsRouting = routingConfig({ libp2p, repo, peerId, options: constructorOptions }) + const ipns = new IPNS(ipnsRouting, repo.datastore, peerId, keychain, { pass: initOptions.pass }) + const bitswap = new Bitswap(libp2p, repo.blocks, { statsEnabled: true }) - const pinAddAll = Components.pin.addAll({ pinManager, gcLock, dag }) - const pinRmAll = Components.pin.rmAll({ pinManager, gcLock, dag }) + await bitswap.start() - const pin = { - add: Components.pin.add({ addAll: pinAddAll }), - addAll: pinAddAll, - ls: Components.pin.ls({ pinManager, dag }), - rm: Components.pin.rm({ rmAll: pinRmAll }), - rmAll: pinRmAll - } + blockService.setExchange(bitswap) - // FIXME: resolve this circular dependency - dag.put = Components.dag.put({ ipld, pin, gcLock, preload }) + const dag = { + get: Components.dag.get({ ipld, preload }), + resolve: Components.dag.resolve({ ipld, preload }), + tree: Components.dag.tree({ ipld, preload }), + // FIXME: resolve this circular dependency + get put () { + const put = Components.dag.put({ ipld, pin, gcLock, preload }) + Object.defineProperty(this, 'put', { value: put }) + return put + } + } + + const pinAddAll = Components.pin.addAll({ pinManager, gcLock, dag }) + const pinRmAll = Components.pin.rmAll({ pinManager, gcLock, dag }) + + const pin = { + add: Components.pin.add({ addAll: pinAddAll }), + addAll: pinAddAll, + ls: Components.pin.ls({ pinManager, dag }), + rm: Components.pin.rm({ rmAll: pinRmAll }), + rmAll: pinRmAll + } + + const block = { + get: Components.block.get({ blockService, preload }), + put: Components.block.put({ blockService, pin, gcLock, preload }), + rm: Components.block.rm({ blockService, gcLock, pinManager }), + stat: Components.block.stat({ blockService, preload }) + } + + const files = Components.files({ ipld, block, blockService, repo, preload, options: constructorOptions }) + const mfsPreload = createMfsPreload({ files, preload, options: constructorOptions.preload }) + + await Promise.all([ + ipns.republisher.start(), + preload.start(), + mfsPreload.start() + ]) + + const api = createApi({ + apiManager, + bitswap, + block, + blockService, 
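The `get put ()` accessor above replaces the late `dag.put = ...` assignment as the workaround for the dag/pin circular dependency. A standalone sketch of the trick: the getter builds the function on first access, then redefines the property as a plain value so later reads skip the getter. `buildPut` is a hypothetical stand-in for `Components.dag.put(...)`.

```js
function makeDag ({ buildPut }) {
  return {
    // circular-dependency workaround: lazily resolve `put` on first access
    get put () {
      const put = buildPut()
      Object.defineProperty(this, 'put', { value: put })
      return put
    }
  }
}

// buildPut runs only once, on the first access to dag.put
let builds = 0
const dag = makeDag({ buildPut: () => { builds++; return node => `stored ${node}` } })
console.log(dag.put('a'), dag.put('b'), builds) // stored a stored b 1
```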
+ config, + constructorOptions, + dag, + files, + gcLock, + initOptions, + ipld, + ipns, + keychain, + libp2p, + mfsPreload, + peerId, + pin, + preload, + print, + repo + }) - const block = { - get: Components.block.get({ blockService, preload }), - put: Components.block.put({ blockService, pin, gcLock, preload }), - rm: Components.block.rm({ blockService, gcLock, pinManager }), - stat: Components.block.stat({ blockService, preload }) + const { api: startedApi } = apiManager.update(api, () => undefined) + startPromise.resolve(startedApi) + return startedApi + } catch (err) { + cancel() + startPromise.reject(err) + throw err } - - const files = Components.files({ ipld, block, blockService, repo, preload, options: constructorOptions }) - const mfsPreload = createMfsPreload({ files, preload, options: constructorOptions.preload }) - - await Promise.all([ - ipns.republisher.start(), - preload.start(), - mfsPreload.start() - ]) - - const api = createApi({ - apiManager, - bitswap, - block, - blockService, - config, - constructorOptions, - dag, - files, - gcLock, - initOptions, - ipld, - ipns, - keychain, - libp2p, - mfsPreload, - peerId, - pin, - pinManager, - preload, - print, - repo - }) - - apiManager.update(api, () => undefined) - - /** @type {typeof api} */ - const startedApi = apiManager.api - startPromise.resolve(startedApi) - return startedApi - } catch (err) { - cancel() - startPromise.reject(err) - throw err } -}) + return withTimeoutOption(start) +} /** - * @template LIBP2P - * @template BlockAPI, DagAPI, FilesAPI, PinAPI - * @param {{ [x: string]: any; libp2p: LIBP2P; block: BlockAPI; dag: DagAPI; files: FilesAPI; pin: PinAPI; }} options + * @param {CreateAPIConfig} config */ function createApi ({ apiManager, @@ -183,7 +198,6 @@ function createApi ({ mfsPreload, peerId, pin, - pinManager, preload, print, repo @@ -230,7 +244,7 @@ function createApi ({ state: Components.name.pubsub.state({ ipns, options: constructorOptions }), subs: Components.name.pubsub.subs({ ipns, options: constructorOptions }) }, - publish: Components.name.publish({ ipns, dag, peerId, isOnline, keychain, options: constructorOptions }), + publish: Components.name.publish({ ipns, dag, peerId, isOnline, keychain }), resolve: Components.name.resolve({ dns, ipns, peerId, isOnline, options: constructorOptions }) } const resolve = Components.resolve({ name, ipld }) @@ -342,3 +356,48 @@ function createApi ({ return api } + +/** + * @typedef {Object} CreateAPIConfig + * @property {APIManager} apiManager + * @property {Bitswap} [bitswap] + * @property {Block} block + * @property {IPFSBlockService} blockService + * @property {Config} config + * @property {StartOptions} constructorOptions + * @property {DAG} dag + * @property {Files} [files] + * @property {GCLock} gcLock + * @property {InitOptions} initOptions + * @property {IPLD} ipld + * @property {import('../ipns')} ipns + * @property {Keychain} keychain + * @property {LibP2P} libp2p + * @property {MFSPreload} mfsPreload + * @property {PeerId} peerId + * @property {Pin} pin + * @property {Preload} preload + * @property {Print} print + * @property {IPFSRepo} repo + * + * @typedef {(...args:any[]) => void} Print + * + * @typedef {import('./init').InitOptions} InitOptions + * @typedef {import('./init').ConstructorOptions} StartOptions + * @typedef {import('./init').Keychain} Keychain + * @typedef {import('../api-manager')} APIManager + * @typedef {import('./pin/pin-manager')} PinManager + * @typedef {import('../mfs-preload').MFSPreload} MFSPreload + * @typedef 
{import('.').IPFSBlockService} IPFSBlockService + * @typedef {import('.').GCLock} GCLock + * @typedef {import('.')} IPLD + * @typedef {import('.').PeerId} PeerId + * @typedef {import('.').Preload} Preload + * @typedef {import('.').IPFSRepo} IPFSRepo + * @typedef {import('.').LibP2P} LibP2P + * @typedef {import('.').Pin} Pin + * @typedef {import('.').Files} Files + * @typedef {import('.').DAG} DAG + * @typedef {import('.').Config} Config + * @typedef {import('.').Block} Block + */ diff --git a/packages/ipfs-core/src/components/stats/bw.js b/packages/ipfs-core/src/components/stats/bw.js index 3f0db4e965..3c2319187c 100644 --- a/packages/ipfs-core/src/components/stats/bw.js +++ b/packages/ipfs-core/src/components/stats/bw.js @@ -1,6 +1,6 @@ 'use strict' -const Big = require('bignumber.js') +const Big = require('bignumber.js').default const parseDuration = require('parse-duration').default const errCode = require('err-code') const { withTimeoutOption } = require('../../utils') @@ -36,9 +36,7 @@ function getBandwidthStats (libp2p, opts) { } module.exports = ({ libp2p }) => { - return withTimeoutOption(async function * (options) { - options = options || {} - + return withTimeoutOption(async function * (options = {}) { if (!options.poll) { yield getBandwidthStats(libp2p, options) return diff --git a/packages/ipfs-core/src/components/stop.js b/packages/ipfs-core/src/components/stop.js index ebe8494353..88e320aaea 100644 --- a/packages/ipfs-core/src/components/stop.js +++ b/packages/ipfs-core/src/components/stop.js @@ -22,46 +22,61 @@ module.exports = ({ preload, print, repo -}) => withTimeoutOption(async function stop () { - const stopPromise = defer() - const { cancel } = apiManager.update({ stop: () => stopPromise.promise }) - - try { - blockService.unsetExchange() - bitswap.stop() - preload.stop() - - await Promise.all([ - ipns.republisher.stop(), - mfsPreload.stop(), - libp2p.stop(), - repo.close() - ]) - - const api = createApi({ - apiManager, - constructorOptions, - blockService, - gcLock, - initOptions, - ipld, - keychain, - peerId, - pinManager, - preload, - print, - repo - }) - - apiManager.update(api, () => { throw new NotStartedError() }) - } catch (err) { - cancel() - stopPromise.reject(err) - throw err +}) => { + /** + * Stops the IPFS node and in case of talking with an IPFS Daemon, it stops + * the process. 
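For the `stats.bw` hunk above, a usage sketch of the polling mode the generator supports; option names follow the interface-ipfs-core `stats.bw` API and an initialised `ipfs` instance is assumed.

```js
// Without `poll` the generator yields a single reading and returns;
// with `poll: true` it keeps yielding at `interval` (parsed by parse-duration).
for await (const stats of ipfs.stats.bw({ poll: true, interval: '2s' })) {
  console.log(`rateIn=${stats.rateIn} rateOut=${stats.rateOut} totalIn=${stats.totalIn}`)
}
```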
+ * + * @param {AbortOptions} _options + * @returns {Promise} + * @example + * ```js + * await ipfs.stop() + * ``` + */ + async function stop (_options) { + const stopPromise = defer() + const { cancel } = apiManager.update({ stop: () => stopPromise.promise }) + + try { + blockService.unsetExchange() + bitswap.stop() + preload.stop() + + await Promise.all([ + ipns.republisher.stop(), + mfsPreload.stop(), + libp2p.stop(), + repo.close() + ]) + + const api = createApi({ + apiManager, + constructorOptions, + blockService, + gcLock, + initOptions, + ipld, + keychain, + peerId, + pinManager, + preload, + print, + repo + }) + + apiManager.update(api, () => { throw new NotStartedError() }) + } catch (err) { + cancel() + stopPromise.reject(err) + throw err + } + + stopPromise.resolve() } - stopPromise.resolve() -}) + return withTimeoutOption(stop) +} function createApi ({ apiManager, @@ -80,7 +95,13 @@ function createApi ({ const dag = { get: Components.dag.get({ ipld, preload }), resolve: Components.dag.resolve({ ipld, preload }), - tree: Components.dag.tree({ ipld, preload }) + tree: Components.dag.tree({ ipld, preload }), + // FIXME: resolve this circular dependency + get put () { + const put = Components.dag.put({ ipld, pin, gcLock, preload }) + Object.defineProperty(this, 'put', { value: put }) + return put + } } const object = { data: Components.object.data({ ipld, preload }), @@ -108,9 +129,6 @@ function createApi ({ rmAll: pinRmAll } - // FIXME: resolve this circular dependency - dag.put = Components.dag.put({ ipld, pin, gcLock, preload }) - const block = { get: Components.block.get({ blockService, preload }), put: Components.block.put({ blockService, pin, gcLock, preload }), @@ -171,7 +189,7 @@ function createApi ({ pin, refs, repo: { - gc: Components.repo.gc({ gcLock, pin, pinManager, refs, repo }), + gc: Components.repo.gc({ gcLock, pin, refs, repo }), stat: Components.repo.stat({ repo }), version: Components.repo.version({ repo }) }, @@ -208,3 +226,7 @@ function createApi ({ return api } + +/** + * @typedef {import('../utils').AbortOptions} AbortOptions + */ diff --git a/packages/ipfs-core/src/components/swarm/peers.js b/packages/ipfs-core/src/components/swarm/peers.js index f578c6e8c0..4a0654cad7 100644 --- a/packages/ipfs-core/src/components/swarm/peers.js +++ b/packages/ipfs-core/src/components/swarm/peers.js @@ -3,9 +3,7 @@ const { withTimeoutOption } = require('../../utils') module.exports = ({ libp2p }) => { - return withTimeoutOption(async function peers (options) { // eslint-disable-line require-await - options = options || {} - + return withTimeoutOption(async function peers (options = {}) { // eslint-disable-line require-await const verbose = options.v || options.verbose const peers = [] diff --git a/packages/ipfs-core/src/components/version.js b/packages/ipfs-core/src/components/version.js index 4ce7ada447..72b304a62f 100644 --- a/packages/ipfs-core/src/components/version.js +++ b/packages/ipfs-core/src/components/version.js @@ -3,39 +3,45 @@ const pkg = require('../../package.json') const { withTimeoutOption } = require('../utils') -/** - * @typedef {object} VersionObj - An object with the version of the implementation, the commit and the Repo. 
`js-ipfs` instances will also return the version of `interface-ipfs-core` and `ipfs-http-client` supported by this node - * @property {string} version - * @property {string} repo - * @property {string} [commit] - * @property {string} [interface-ipfs-core] - * @property {string} [ipfs-http-client] - */ - -/** - * Returns the implementation version - * - * @template {Record} ExtraOptions - * @callback Version - * @param {import('../utils').AbortOptions & ExtraOptions} [options] - * @returns {Promise} - */ +// gitHead is defined in published versions +const meta = { gitHead: '', ...pkg } module.exports = ({ repo }) => { /** - * @type {Version<{}>} + * Returns the implementation version + * + * @param {import('../utils').AbortOptions} [options] + * @returns {Promise} + * @example + * ```js + * const version = await ipfs.version() + * console.log(version) + * ``` */ async function version (options) { const repoVersion = await repo.version.get(options) return { - version: pkg.version, + version: meta.version, repo: repoVersion, - commit: pkg.gitHead || '', // is defined in published versions, - 'interface-ipfs-core': pkg.devDependencies['interface-ipfs-core'], - 'ipfs-http-client': pkg.dependencies['ipfs-http-client'] + commit: meta.gitHead, + 'interface-ipfs-core': meta.devDependencies['interface-ipfs-core'] } } return withTimeoutOption(version) } + +/** + * @typedef {object} Version + * An object with the version information for the implementation, + * the commit and the Repo. `js-ipfs` instances will also return + * the version of `interface-ipfs-core` and `ipfs-http-client` + * supported by this node + * + * @property {string} version + * @property {string} repo + * @property {string} [commit] + * @property {string} [interface-ipfs-core] + * @property {string} [ipfs-http-client] + */ diff --git a/packages/ipfs-core/src/index.js b/packages/ipfs-core/src/index.js index a385b2ef93..a32e7f5339 100644 --- a/packages/ipfs-core/src/index.js +++ b/packages/ipfs-core/src/index.js @@ -1,6 +1,8 @@ 'use strict' const log = require('debug')('ipfs') + +/** @type {typeof Object.assign} */ const mergeOptions = require('merge-options') const { isTest } = require('ipfs-utils/src/env') const globSource = require('ipfs-utils/src/files/glob-source') @@ -33,64 +35,14 @@ const getDefaultOptions = () => ({ } }) -/** - * @typedef {'rsa' | 'ed25519' | 'secp256k1'} KeyType - * - * @typedef {object} InitOptions - * @property {boolean} [emptyRepo] - Whether to remove built-in assets, like the instructional tour and empty mutable file system, from the repo. (Default: `false`) - * @property {KeyType} [algorithm] - The type of key to use. (Default: `rsa`) - * @property {number} [bits] - Number of bits to use in the generated key pair (rsa only). (Default: `2048`) - * @property {string | import('peer-id')} [privateKey] - A pre-generated private key to use. Can be either a base64 string or a [PeerId](https://github.com/libp2p/js-peer-id) instance. **NOTE: This overrides `bits`.** - * @property {string} [pass] - A passphrase to encrypt keys. You should generally use the [top-level `pass` option](#optionspass) instead of the `init.pass` option (this one will take its value from the top-level option if not set). - * @property {any[]} [profiles] - Apply profile settings to config. - * @property {boolean} [allowNew] - Set to `false` to disallow initialization if the repo does not already exist. (Default: `true`) - * - * @typedef {object} RelayOptions - * @property {boolean} [enabled] - Enable circuit relay dialer and listener. 
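A short usage sketch for the `version()` implementation earlier in this hunk; it assumes an initialised `ipfs` instance, and `commit` may be empty when `gitHead` is absent from package.json (i.e. unpublished builds).

```js
const { version, repo, commit } = await ipfs.version()
console.log(`js-ipfs ${version} (repo version ${repo}${commit ? `, commit ${commit}` : ''})`)
```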
(Default: `true`) - * @property {object} [hop] - * @property {boolean} [hop.enabled] - Make this node a relay (other nodes can connect *through* it). (Default: `false`) - * @property {boolean} [hop.active] - Make this an *active* relay node. Active relay nodes will attempt to dial a destination peer even if that peer is not yet connected to the relay. (Default: `false`) - * - * @typedef {object} PreloadOptions - * @property {boolean} [enabled] - Enable content preloading (Default: `true`) - * @property {string[]} [addresses] - Multiaddr API addresses of nodes that should preload content. - * - **NOTE:** nodes specified here should also be added to your node's bootstrap address list at `config.Boostrap`. - * - * @typedef {object} ExperimentalOptions - * @property {boolean} [ipnsPubsub] - Enable pub-sub on IPNS. (Default: `false`) - * @property {boolean} [sharding] - Enable directory sharding. Directories that have many child objects will be represented by multiple DAG nodes instead of just one. It can improve lookup performance when a directory has several thousand files or more. (Default: `false`) - */ - -/** - * @typedef { import('ipfs-repo') } IpfsRepo - */ - /** * Creates and returns a ready to use instance of an IPFS node. * - * @template {boolean | InitOptions} INIT - * @template {boolean} START - * - * @param {object} [options] - specify advanced configuration - * @param {string | IpfsRepo} [options.repo] - The file path at which to store the IPFS node’s data. Alternatively, you can set up a customized storage system by providing an [`ipfs.Repo`](https://github.com/ipfs/js-ipfs-repo) instance. (Default: `'~/.jsipfs'` in Node.js, `'ipfs'` in browsers) - * @param {boolean} [options.repoAutoMigrate] - `js-ipfs` comes bundled with a tool that automatically migrates your IPFS repository when a new version is available. (Default: `true`) - * @param {INIT} [options.init] - Perform repo initialization steps when creating the IPFS node. (Default: `true`) - * - Note that *initializing* a repo is different from creating an instance of [`ipfs.Repo`](https://github.com/ipfs/js-ipfs-repo). The IPFS constructor sets many special properties when initializing a repo, so you should usually not try and call `repoInstance.init()` yourself. - * @param {START} [options.start] - If `false`, do not automatically start the IPFS node. Instead, you’ll need to manually call [`node.start()`](#nodestart) yourself. (Default: `true`) - * @param {string} [options.pass] - A passphrase to encrypt/decrypt your keys. (Default: `null`) - * @param {boolean} [options.silent] - Prevents all logging output from the IPFS node. (Default: `false`) - * @param {RelayOptions} [options.relay] - Configure circuit relay (see the [circuit relay tutorial](https://github.com/ipfs/js-ipfs/tree/master/examples/circuit-relaying) to learn more). (Default: `{ enabled: true, hop: { enabled: false, active: false } }`) - * @param {boolean} [options.offline] - Run ipfs node offline. The node does not connect to the rest of the network but provides a local API. (Default: `false`) - * @param {PreloadOptions} [options.preload] - Configure remote preload nodes. The remote will preload content added on this node, and also attempt to preload objects requested by this node. - * @param {ExperimentalOptions} [options.EXPERIMENTAL] - Enable and configure experimental features. - * @param {object} [options.config] - Modify the default IPFS node config. This object will be *merged* with the default config; it will not replace it. 
(Default: [`config-nodejs.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/config-nodejs.js) in Node.js, [`config-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/config-browser.js) in browsers) - * @param {object} [options.ipld] - Modify the default IPLD config. This object will be *merged* with the default config; it will not replace it. Check IPLD [docs](https://github.com/ipld/js-ipld#ipld-constructor) for more information on the available options. (Default: [`ipld-nodejs.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-nodejs.js) in Node.js, [`ipld-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-browser.js) in browsers) - * @param {object | Function} [options.libp2p] - The libp2p option allows you to build your libp2p node by configuration, or via a bundle function. If you are looking to just modify the below options, using the object format is the quickest way to get the default features of libp2p. If you need to create a more customized libp2p node, such as with custom transports or peer/content routers that need some of the ipfs data on startup, a custom bundle is a great way to achieve this. - * - You can see the bundle in action in the [custom libp2p example](https://github.com/ipfs/js-ipfs/tree/master/examples/custom-libp2p). - * - Please see [libp2p/docs/CONFIGURATION.md](https://github.com/libp2p/js-libp2p/blob/master/doc/CONFIGURATION.md) for the list of options libp2p supports. - * - Default: [`libp2p-nodejs.js`](../src/core/runtime/libp2p-nodejs.js) in Node.js, [`libp2p-browser.js`](../src/core/runtime/libp2p-browser.js) in browsers + * @template {boolean | InitOptions} Init + * @template {boolean} Start + * @param {CreateOptions} [options] */ -async function create (options) { +async function create (options = {}) { options = mergeOptions(getDefaultOptions(), options) // eslint-disable-next-line no-console @@ -108,11 +60,19 @@ async function create (options) { const startedApi = options.start && initializedApi && await initializedApi.start() /** - * @template T, THEN, ELSE - * @typedef {NonNullable extends false - * ? THEN : ELSE } IsFalse + * create returns object that has different API set based on `options.init` + * and `options.start` values. If we just return `startedApi || initializedApi || api` + * TS will infer return type to be ` typeof startedAPI || typeof initializedApi || typeof api` + * which user would in practice act like `api` with all the extra APIs as optionals. + * + * Type trickery below attempts to affect inference by explicitly telling + * what the return type is and when. + * + * @typedef {typeof api} API + * @typedef {NonNullable} InitializedAPI + * @typedef {NonNullable} StartedAPI + * @type {If, API>} */ - /** @type {IsFalse>} */ // @ts-ignore const ipfs = startedApi || initializedApi || api return ipfs @@ -133,3 +93,100 @@ module.exports = { globSource, urlSource } + +/** + * @template {boolean | InitOptions} Init + * @template {boolean} Start + * + * @typedef {Object} CreateOptions + * Options argument can be used to specify advanced configuration. + * @property {RepoOption} [repo='~/.jsipfs'] + * @property {boolean} [repoAutoMigrate=true] - `js-ipfs` comes bundled with a + * tool that automatically migrates your IPFS repository when a new version is + * available. + * @property {Init} [init=true] - Perform repo initialization steps when creating + * the IPFS node. 
+ * Note that *initializing* a repo is different from creating an instance of + * [`ipfs.Repo`](https://github.com/ipfs/js-ipfs-repo). The IPFS constructor + * sets many special properties when initializing a repo, so you should usually + * not try and call `repoInstance.init()` yourself. + * @property {Start} [start=true] - If `false`, do not automatically + * start the IPFS node. Instead, you’ll need to manually call + * [`node.start()`](https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs/docs/MODULE.md#nodestart) + * yourself. + * @property {string} [pass=null] - A passphrase to encrypt/decrypt your keys. + * @property {boolean} [silent=false] - Prevents all logging output from the + * IPFS node. (Default: `false`) + * @property {RelayOptions} [relay={ enabled: true, hop: { enabled: false, active: false } }] + * - Configure circuit relay (see the [circuit relay tutorial] + * (https://github.com/ipfs/js-ipfs/tree/master/examples/circuit-relaying) + * to learn more). + * @property {boolean} [offline=false] - Run ipfs node offline. The node does + * not connect to the rest of the network but provides a local API. + * @property {PreloadOptions} [preload] - Configure remote preload nodes. + * The remote will preload content added on this node, and also attempt to + * preload objects requested by this node. + * @property {ExperimentalOptions} [EXPERIMENTAL] - Enable and configure + * experimental features. + * @property {object} [config] - Modify the default IPFS node config. This + * object will be *merged* with the default config; it will not replace it. + * (Default: [`config-nodejs.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/config-nodejs.js) + * in Node.js, [`config-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/config-browser.js) + * in browsers) + * @property {import('./components').IPLDConfig} [ipld] - Modify the default IPLD config. This object + * will be *merged* with the default config; it will not replace it. Check IPLD + * [docs](https://github.com/ipld/js-ipld#ipld-constructor) for more information + * on the available options. (Default: [`ipld-nodejs.js`] + * (https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-nodejs.js) in Node.js, [`ipld-browser.js`](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs/src/core/runtime/ipld-browser.js) + * in browsers) + * @property {object|Function} [libp2p] - The libp2p option allows you to build + * your libp2p node by configuration, or via a bundle function. If you are + * looking to just modify the below options, using the object format is the + * quickest way to get the default features of libp2p. If you need to create a + * more customized libp2p node, such as with custom transports or peer/content + * routers that need some of the ipfs data on startup, a custom bundle is a + * great way to achieve this. + * - You can see the bundle in action in the [custom libp2p example](https://github.com/ipfs/js-ipfs/tree/master/examples/custom-libp2p). + * - Please see [libp2p/docs/CONFIGURATION.md](https://github.com/libp2p/js-libp2p/blob/master/doc/CONFIGURATION.md) + * for the list of options libp2p supports. + * - Default: [`libp2p-nodejs.js`](../src/core/runtime/libp2p-nodejs.js) + * in Node.js, [`libp2p-browser.js`](../src/core/runtime/libp2p-browser.js) in + * browsers. + */ + +/** + * @typedef {IPFSRepo|string} RepoOption + * The file path at which to store the IPFS node’s data. 
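A sketch combining several of the `CreateOptions` documented here. The values are illustrative overrides, not recommendations, and the require path assumes the `ipfs-core` package entry point.

```js
const IPFS = require('ipfs-core')

async function main () {
  const node = await IPFS.create({
    repo: './.jsipfs-demo',                 // RepoOption: custom repo path
    init: { emptyRepo: true },              // InitOptions
    start: true,
    silent: true,
    offline: false,
    preload: { enabled: false },
    relay: { enabled: true, hop: { enabled: false, active: false } }
  })

  console.log('node id:', (await node.id()).id)
  await node.stop()
}

main().catch(console.error)
```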
Alternatively, you + * can set up a customized storage system by providing an `ipfs.Repo` instance. + * + * @example + * ```js + * // Store data outside your user directory + * const node = await IPFS.create({ repo: '/var/ipfs/data' }) + * ``` + * @typedef {import('./components/init').InitOptions} InitOptions + * + * @typedef {object} RelayOptions + * @property {boolean} [enabled] - Enable circuit relay dialer and listener. (Default: `true`) + * @property {object} [hop] + * @property {boolean} [hop.enabled] - Make this node a relay (other nodes can connect *through* it). (Default: `false`) + * @property {boolean} [hop.active] - Make this an *active* relay node. Active relay nodes will attempt to dial a destination peer even if that peer is not yet connected to the relay. (Default: `false`) + * + * @typedef {object} PreloadOptions + * @property {boolean} [enabled] - Enable content preloading (Default: `true`) + * @property {string[]} [addresses] - Multiaddr API addresses of nodes that should preload content. + * **NOTE:** nodes specified here should also be added to your node's bootstrap address list at `config.Boostrap`. + * + * @typedef {object} ExperimentalOptions + * @property {boolean} [ipnsPubsub] - Enable pub-sub on IPNS. (Default: `false`) + * @property {boolean} [sharding] - Enable directory sharding. Directories that have many child objects will be represented by multiple DAG nodes instead of just one. It can improve lookup performance when a directory has several thousand files or more. (Default: `false`) + * + * @typedef {import('./components').IPFSRepo} IPFSRepo + */ + +/** + * Utility type to write type level conditionals + * + * @template Conditon, Then, Else + * @typedef {NonNullable extends false ? Else : Then } If + */ diff --git a/packages/ipfs-core/src/ipns/index.js b/packages/ipfs-core/src/ipns/index.js index d7df73be11..87fd6b4970 100644 --- a/packages/ipfs-core/src/ipns/index.js +++ b/packages/ipfs-core/src/ipns/index.js @@ -3,8 +3,9 @@ const { createFromPrivKey } = require('peer-id') const errcode = require('err-code') const debug = require('debug') -const log = debug('ipfs:ipns') -log.error = debug('ipfs:ipns:error') +const log = Object.assign(debug('ipfs:ipns'), { + error: debug('ipfs:ipns:error') +}) const IpnsPublisher = require('./publisher') const IpnsRepublisher = require('./republisher') @@ -34,6 +35,7 @@ class IPNS { // // Add to cache const id = peerId.toB58String() + // @ts-ignore - parseFloat expects string const ttEol = parseFloat(lifetime) const ttl = (ttEol < defaultRecordTtl) ? 
ttEol : defaultRecordTtl @@ -53,13 +55,11 @@ class IPNS { } // Resolve - async resolve (name, options) { + async resolve (name, options = {}) { if (typeof name !== 'string') { throw errcode(new Error('name received is not valid'), 'ERR_INVALID_NAME') } - options = options || {} - // If recursive, we should not try to get the cached value if (!options.nocache && !options.recursive) { // Try to get the record from cache diff --git a/packages/ipfs-core/src/ipns/publisher.js b/packages/ipfs-core/src/ipns/publisher.js index 4fccf589ab..3b66854dc0 100644 --- a/packages/ipfs-core/src/ipns/publisher.js +++ b/packages/ipfs-core/src/ipns/publisher.js @@ -4,8 +4,9 @@ const PeerId = require('peer-id') const { Key, Errors } = require('interface-datastore') const errcode = require('err-code') const debug = require('debug') -const log = debug('ipfs:ipns:publisher') -log.error = debug('ipfs:ipns:publisher:error') +const log = Object.assign(debug('ipfs:ipns:publisher'), { + error: debug('ipfs:ipns:publisher:error') +}) const uint8ArrayToString = require('uint8arrays/to-string') const ipns = require('ipns') @@ -45,11 +46,12 @@ class IpnsPublisher { throw errcode(new Error(errMsg), 'ERR_INVALID_PEER_ID') } + // @ts-ignore - accessing private property isn't allowed const publicKey = peerId._pubKey const embedPublicKeyRecord = await ipns.embedPublicKey(publicKey, record) const keys = ipns.getIdKeys(peerId.toBytes()) - await this._publishEntry(keys.routingKey, embedPublicKeyRecord || record, peerId) + await this._publishEntry(keys.routingKey, embedPublicKeyRecord || record) // Publish the public key to support old go-ipfs nodes that are looking for it in the routing // We will be able to deprecate this part in the future, since the public keys will be only @@ -125,7 +127,7 @@ class IpnsPublisher { // Returns the record this node has published corresponding to the given peer ID. // If `checkRouting` is true and we have no existing record, this method will check the routing system for any existing records. 
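The `Object.assign(debug(...), { error: debug(...) })` construction used across these ipns modules replaces the old two-step `log.error = ...` assignment so that `log.error` is part of the logger's inferred type. A minimal standalone sketch (the namespace is illustrative):

```js
const debug = require('debug')

const log = Object.assign(debug('ipfs:example'), {
  error: debug('ipfs:example:error')
})

log('resolving %s', '/ipns/example')
log.error('resolution failed')
```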
- async _getPublished (peerId, options) { + async _getPublished (peerId, options = {}) { if (!(PeerId.isPeerId(peerId))) { const errMsg = 'peerId received is not valid' @@ -134,7 +136,6 @@ class IpnsPublisher { throw errcode(new Error(errMsg), 'ERR_INVALID_PEER_ID') } - options = options || {} const checkRouting = options.checkRouting !== false try { diff --git a/packages/ipfs-core/src/ipns/republisher.js b/packages/ipfs-core/src/ipns/republisher.js index 78bdd5edbb..0efccdc954 100644 --- a/packages/ipfs-core/src/ipns/republisher.js +++ b/packages/ipfs-core/src/ipns/republisher.js @@ -6,8 +6,9 @@ const PeerId = require('peer-id') const errcode = require('err-code') const debug = require('debug') -const log = debug('ipfs:ipns:republisher') -log.error = debug('ipfs:ipns:republisher:error') +const log = Object.assign(debug('ipfs:ipns:republisher'), { + error: debug('ipfs:ipns:republisher:error') +}) const minute = 60 * 1000 const hour = 60 * minute @@ -16,12 +17,12 @@ const defaultBroadcastInterval = 4 * hour const defaultRecordLifetime = 24 * hour class IpnsRepublisher { - constructor (publisher, datastore, peerId, keychain, options) { + constructor (publisher, datastore, peerId, keychain, options = {}) { this._publisher = publisher this._datastore = datastore this._peerId = peerId this._keychain = keychain - this._options = options || {} + this._options = options this._republishHandle = null } @@ -32,14 +33,21 @@ class IpnsRepublisher { // TODO: this handler should be isolated in another module const republishHandle = { + /** @type {null|(() => Promise)} */ _task: null, + /** @type {null|Promise} */ _inflightTask: null, + /** @type {null|NodeJS.Timeout} */ _timeoutId: null, + /** + * @param {function(): number} period + */ runPeriodically: (period) => { republishHandle._timeoutId = setTimeout(async () => { republishHandle._timeoutId = null try { + // @ts-ignore - _task could be null republishHandle._inflightTask = republishHandle._task() await republishHandle._inflightTask @@ -54,7 +62,9 @@ class IpnsRepublisher { }, cancel: async () => { // do not run again - clearTimeout(republishHandle._timeoutId) + if (republishHandle._timeoutId != null) { + clearTimeout(republishHandle._timeoutId) + } republishHandle._task = null // wait for the currently in flight task to complete diff --git a/packages/ipfs-core/src/ipns/resolver.js b/packages/ipfs-core/src/ipns/resolver.js index 4e119b53cb..f57936064d 100644 --- a/packages/ipfs-core/src/ipns/resolver.js +++ b/packages/ipfs-core/src/ipns/resolver.js @@ -4,8 +4,9 @@ const ipns = require('ipns') const PeerId = require('peer-id') const errcode = require('err-code') const debug = require('debug') -const log = debug('ipfs:ipns:resolver') -log.error = debug('ipfs:ipns:resolver:error') +const log = Object.assign(debug('ipfs:ipns:resolver'), { + error: debug('ipfs:ipns:resolver:error') +}) const uint8ArrayToString = require('uint8arrays/to-string') const { Errors } = require('interface-datastore') @@ -18,14 +19,11 @@ class IpnsResolver { this._routing = routing } - async resolve (name, options) { - options = options || {} - + async resolve (name, options = {}) { if (typeof name !== 'string') { throw errcode(new Error('invalid name'), 'ERR_INVALID_NAME') } - options = options || {} const recursive = options.recursive && options.recursive.toString() === 'true' const nameSegments = name.split('/') diff --git a/packages/ipfs-core/src/ipns/routing/offline-datastore.js b/packages/ipfs-core/src/ipns/routing/offline-datastore.js index 558aeb62e0..aef4bbe0b0 100644 
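A standalone sketch of the republish handle refactored above: a self-rescheduling task whose `cancel()` clears the pending timer and waits for any in-flight run, matching the `_task` / `_inflightTask` / `_timeoutId` fields and the `function(): number` period shown in the hunk. Error handling is reduced to a `console.error` for brevity.

```js
function createRepublishHandle (task) {
  const handle = {
    _task: task,
    _inflightTask: null,
    _timeoutId: null,
    runPeriodically (period) {
      handle._timeoutId = setTimeout(async () => {
        handle._timeoutId = null

        // keep a reference so cancel() can wait for the in-flight run;
        // errors are logged so the promise never rejects unobserved
        handle._inflightTask = Promise.resolve(handle._task()).catch(console.error)
        await handle._inflightTask
        handle._inflightTask = null

        // only re-arm if cancel() has not cleared the task
        if (handle._task) {
          handle.runPeriodically(period)
        }
      }, period())
    },
    async cancel () {
      // do not run again
      if (handle._timeoutId != null) {
        clearTimeout(handle._timeoutId)
      }
      handle._task = null

      // wait for the currently in-flight run to complete
      await handle._inflightTask
    }
  }

  return handle
}

// usage: run roughly every 5 seconds until cancelled
const handle = createRepublishHandle(async () => console.log('republishing'))
handle.runPeriodically(() => 5000)
```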
--- a/packages/ipfs-core/src/ipns/routing/offline-datastore.js +++ b/packages/ipfs-core/src/ipns/routing/offline-datastore.js @@ -6,8 +6,9 @@ const { encodeBase32 } = require('./utils') const errcode = require('err-code') const debug = require('debug') -const log = debug('ipfs:ipns:offline-datastore') -log.error = debug('ipfs:ipns:offline-datastore:error') +const log = Object.assign(debug('ipfs:ipns:offline-datastore'), { + error: debug('ipfs:ipns:offline-datastore:error') +}) // Offline datastore aims to mimic the same encoding as routing when storing records // to the local datastore diff --git a/packages/ipfs-core/src/ipns/routing/pubsub-datastore.js b/packages/ipfs-core/src/ipns/routing/pubsub-datastore.js index 46c445dc12..ae0f02bbe4 100644 --- a/packages/ipfs-core/src/ipns/routing/pubsub-datastore.js +++ b/packages/ipfs-core/src/ipns/routing/pubsub-datastore.js @@ -10,8 +10,9 @@ const withIs = require('class-is') const errcode = require('err-code') const debug = require('debug') -const log = debug('ipfs:ipns:pubsub') -log.error = debug('ipfs:ipns:pubsub:error') +const log = Object.assign(debug('ipfs:ipns:pubsub'), { + error: debug('ipfs:ipns:pubsub:error') +}) // Pubsub datastore aims to manage the pubsub subscriptions for IPNS class IpnsPubsubDatastore { @@ -29,7 +30,7 @@ class IpnsPubsubDatastore { * * @param {Buffer} key - identifier of the value. * @param {Buffer} value - value to be stored. - * @returns {void} + * @returns {Promise} */ async put (key, value) { // eslint-disable-line require-await return this._pubsubDs.put(key, value) @@ -111,7 +112,7 @@ class IpnsPubsubDatastore { * Cancel pubsub subscriptions related to ipns. * * @param {string} name - ipns path to cancel the pubsub subscription. - * @returns {{canceled: boolean}} + * @returns {Promise<{canceled: boolean}>} */ async cancel (name) { // eslint-disable-line require-await if (typeof name !== 'string') { diff --git a/packages/ipfs-core/src/ipns/routing/utils.js b/packages/ipfs-core/src/ipns/routing/utils.js index e6c8e7841c..20f2b65423 100644 --- a/packages/ipfs-core/src/ipns/routing/utils.js +++ b/packages/ipfs-core/src/ipns/routing/utils.js @@ -8,5 +8,5 @@ module.exports = { validator: { func: (key, record, cb) => ipns.validator.validate(record, key, cb) }, - selector: (k, records) => ipns.validator.select(records[0], records[1]) + selector: (_k, records) => ipns.validator.select(records[0], records[1]) } diff --git a/packages/ipfs-core/src/mfs-preload.js b/packages/ipfs-core/src/mfs-preload.js index a93d2a9f69..aabffee3bb 100644 --- a/packages/ipfs-core/src/mfs-preload.js +++ b/packages/ipfs-core/src/mfs-preload.js @@ -2,11 +2,17 @@ const debug = require('debug') const { cidToString } = require('ipfs-core-utils/src/cid') -const log = debug('ipfs:mfs-preload') -log.error = debug('ipfs:mfs-preload:error') +const log = Object.assign(debug('ipfs:mfs-preload'), { + error: debug('ipfs:mfs-preload:error') +}) -module.exports = ({ preload, files, options }) => { - options = options || {} +/** + * @param {Object} config + * @param {import('./components/index').Preload} config.preload + * @param {import('./components/index').Files} config.files + * @param {import('./components/init').PreloadOptions} [config.options] + */ +module.exports = ({ preload, files, options = {} }) => { options.interval = options.interval || 30 * 1000 if (!options.enabled) { @@ -35,14 +41,24 @@ module.exports = ({ preload, files, options }) => { } return { + /** + * @returns {Promise} + */ async start () { const stats = await files.stat('/') 
rootCid = cidToString(stats.cid, { base: 'base32' }) log(`monitoring MFS root ${stats.cid}`) timeoutId = setTimeout(preloadMfs, options.interval) }, + /** + * @returns {void} + */ stop () { clearTimeout(timeoutId) } } } + +/** + * @typedef {ReturnType} MFSPreload + */ diff --git a/packages/ipfs-core/src/preload.js b/packages/ipfs-core/src/preload.js index 782b8e5252..57d24fc76a 100644 --- a/packages/ipfs-core/src/preload.js +++ b/packages/ipfs-core/src/preload.js @@ -12,8 +12,12 @@ const log = Object.assign( { error: debug('ipfs:preload:error') } ) -module.exports = options => { - options = options || {} +/** + * @param {Object} [options] + * @param {boolean} [options.enabled] + * @param {string[]} [options.addresses] + */ +const createPreloader = (options = {}) => { options.enabled = Boolean(options.enabled) options.addresses = options.addresses || [] @@ -30,6 +34,10 @@ module.exports = options => { let requests = [] const apiUris = options.addresses.map(toUri) + /** + * @param {string|CID} path + * @returns {Promise} + */ const api = async path => { try { if (stopped) throw new Error(`preload ${path} but preloader is not started`) @@ -66,10 +74,16 @@ module.exports = options => { } } + /** + * @returns {void} + */ api.start = () => { stopped = false } + /** + * @returns {void} + */ api.stop = () => { stopped = true log(`aborting ${requests.length} pending preload request(s)`) @@ -79,3 +93,5 @@ module.exports = options => { return api } + +module.exports = createPreloader diff --git a/packages/ipfs-core/src/runtime/dns-browser.js b/packages/ipfs-core/src/runtime/dns-browser.js index 80d8bfc988..3d2fab2bd4 100644 --- a/packages/ipfs-core/src/runtime/dns-browser.js +++ b/packages/ipfs-core/src/runtime/dns-browser.js @@ -22,9 +22,7 @@ const ipfsPath = (response) => { } module.exports = async (fqdn, opts) => { // eslint-disable-line require-await - const resolveDnslink = async (fqdn, opts) => { - opts = opts || {} - + const resolveDnslink = async (fqdn, opts = {}) => { const searchParams = new URLSearchParams(opts) searchParams.set('arg', fqdn) diff --git a/packages/ipfs-core/src/runtime/ipld-browser.js b/packages/ipfs-core/src/runtime/ipld-browser.js index 4d47639bbf..31c19c141a 100644 --- a/packages/ipfs-core/src/runtime/ipld-browser.js +++ b/packages/ipfs-core/src/runtime/ipld-browser.js @@ -4,9 +4,7 @@ const ipldDagCbor = require('ipld-dag-cbor') const ipldDagPb = require('ipld-dag-pb') const ipldRaw = require('ipld-raw') -module.exports = (blockService, options) => { - options = options || {} - +module.exports = (blockService, options = {}) => { return mergeOptions.call( // ensure we have the defaults formats even if the user overrides `formats: []` { concatArrays: true }, diff --git a/packages/ipfs-core/src/runtime/ipld-nodejs.js b/packages/ipfs-core/src/runtime/ipld-nodejs.js index 25f68a697c..9da87b90a1 100644 --- a/packages/ipfs-core/src/runtime/ipld-nodejs.js +++ b/packages/ipfs-core/src/runtime/ipld-nodejs.js @@ -45,9 +45,7 @@ const IpldFormats = { } } -module.exports = (blockService, options, log) => { - options = options || {} - +module.exports = (blockService, options = {}, log) => { return mergeOptions.call( // ensure we have the defaults formats even if the user overrides `formats: []` { concatArrays: true }, diff --git a/packages/ipfs-core/src/runtime/preload-browser.js b/packages/ipfs-core/src/runtime/preload-browser.js index c24d0506bd..9d2e8a3997 100644 --- a/packages/ipfs-core/src/runtime/preload-browser.js +++ b/packages/ipfs-core/src/runtime/preload-browser.js @@ -5,16 
+5,16 @@ const { default: PQueue } = require('p-queue') const HTTP = require('ipfs-utils/src/http') const debug = require('debug') -const log = debug('ipfs:preload') -log.error = debug('ipfs:preload:error') +const log = Object.assign(debug('ipfs:preload'), { + error: debug('ipfs:preload:error') +}) // browsers limit concurrent connections per host, // we don't want preload calls to exhaust the limit (~6) const httpQueue = new PQueue({ concurrency: 4 }) -module.exports = function preload (url, options) { +module.exports = function preload (url, options = {}) { log(url) - options = options || {} return httpQueue.add(async () => { const res = await HTTP.post(url, { signal: options.signal }) diff --git a/packages/ipfs-core/src/runtime/preload-nodejs.js b/packages/ipfs-core/src/runtime/preload-nodejs.js index 066c5d5c6f..4f2ca985e5 100644 --- a/packages/ipfs-core/src/runtime/preload-nodejs.js +++ b/packages/ipfs-core/src/runtime/preload-nodejs.js @@ -3,12 +3,12 @@ const HTTP = require('ipfs-utils/src/http') const debug = require('debug') -const log = debug('ipfs:preload') -log.error = debug('ipfs:preload:error') +const log = Object.assign(debug('ipfs:preload'), { + error: debug('ipfs:preload:error') +}) -module.exports = async function preload (url, options) { +module.exports = async function preload (url, options = {}) { log(url) - options = options || {} const res = await HTTP.post(url, { signal: options.signal }) diff --git a/packages/ipfs-core/src/runtime/repo-browser.js b/packages/ipfs-core/src/runtime/repo-browser.js index de4c9f59bf..ad13c85d93 100644 --- a/packages/ipfs-core/src/runtime/repo-browser.js +++ b/packages/ipfs-core/src/runtime/repo-browser.js @@ -2,8 +2,7 @@ const IPFSRepo = require('ipfs-repo') -module.exports = (options) => { - options = options || {} +module.exports = (options = {}) => { const repoPath = options.path || 'ipfs' return new IPFSRepo(repoPath, { autoMigrate: options.autoMigrate }) } diff --git a/packages/ipfs-core/src/runtime/repo-nodejs.js b/packages/ipfs-core/src/runtime/repo-nodejs.js index c0208f7582..e8ad5ee54c 100644 --- a/packages/ipfs-core/src/runtime/repo-nodejs.js +++ b/packages/ipfs-core/src/runtime/repo-nodejs.js @@ -4,12 +4,17 @@ const os = require('os') const IPFSRepo = require('ipfs-repo') const path = require('path') -module.exports = options => { - options = options || {} +/** + * @param {Object} [options] + * @param {string} [options.path] + * @param {boolean} [options.silent] + * @param {boolean} [options.autoMigrate] + */ +module.exports = (options = {}) => { const repoPath = options.path || path.join(os.homedir(), '.jsipfs') let lastMigration = null - let onMigrationProgress = (version, percentComplete, message) => { + const onMigrationProgress = (version, percentComplete, message) => { if (version !== lastMigration) { lastMigration = version @@ -19,12 +24,8 @@ module.exports = options => { console.info(`${percentComplete.toString().padStart(6, ' ')}% ${message}`) // eslint-disable-line no-console } - if (options.silent) { - onMigrationProgress = null - } - return new IPFSRepo(repoPath, { autoMigrate: options.autoMigrate, - onMigrationProgress + onMigrationProgress: options.silent ? 
null : onMigrationProgress }) } diff --git a/packages/ipfs-core/src/utils.js b/packages/ipfs-core/src/utils.js index 155363b4d5..b14873723a 100644 --- a/packages/ipfs-core/src/utils.js +++ b/packages/ipfs-core/src/utils.js @@ -4,7 +4,7 @@ const isIpfs = require('is-ipfs') const CID = require('cids') const TimeoutController = require('timeout-abort-controller') -const anySignal = require('any-signal') +const { anySignal } = require('any-signal') const parseDuration = require('parse-duration').default const Key = require('interface-datastore').Key const { TimeoutError } = require('./errors') @@ -30,7 +30,7 @@ exports.MFS_MAX_LINKS = 174 * * @param {string} pathStr - An ipfs-path, or ipns-path or a cid * @returns {string} - ipfs-path or ipns-path - * @throws on an invalid @param ipfsPath + * @throws on an invalid @param pathStr */ const normalizePath = (pathStr) => { if (isIpfs.cid(pathStr)) { @@ -43,6 +43,10 @@ const normalizePath = (pathStr) => { } // TODO: do we need both normalizePath and normalizeCidPath? +/** + * @param {Uint8Array|CID|string} path + * @returns {string} + */ const normalizeCidPath = (path) => { if (path instanceof Uint8Array) { return new CID(path).toString() @@ -69,15 +73,14 @@ const normalizeCidPath = (path) => { * - /ipfs//link/to/pluto * - multihash Buffer * - * @param {Dag} dag - The IPFS dag api + * @param {import('./components').DAG} dag - The IPFS dag api * @param {CID | string} ipfsPath - A CID or IPFS path * @param {Object} [options] - Optional options passed directly to dag.resolve - * @returns {CID} + * @returns {Promise} */ -const resolvePath = async function (dag, ipfsPath, options) { - options = options || {} - +const resolvePath = async function (dag, ipfsPath, options = {}) { if (isIpfs.cid(ipfsPath)) { + // @ts-ignore - CID|string seems to confuse typedef return new CID(ipfsPath) } @@ -98,9 +101,14 @@ const resolvePath = async function (dag, ipfsPath, options) { return result.cid } -const mapFile = (file, options) => { - options = options || {} - +/** + * @param {InputFile|UnixFSFile} file + * @param {Object} [options] + * @param {boolean} [options.includeContent] + * @returns {IPFSEntry} + */ +const mapFile = (file, options = {}) => { + /** @type {IPFSEntry} */ const output = { cid: file.cid, path: file.path, @@ -111,6 +119,7 @@ const mapFile = (file, options) => { } if (file.unixfs) { + // @ts-ignore - TS type can't be changed from File to Directory output.type = file.unixfs.type === 'directory' ? 
'dir' : 'file' if (file.unixfs.type === 'file') { @@ -128,6 +137,61 @@ const mapFile = (file, options) => { return output } +/** + * @typedef {Object} File + * @property {'file'} type + * @property {CID} cid + * @property {string} name + * @property {string} path - File path + * @property {AsyncIterable} [content] - File content + * @property {number} [mode] + * @property {MTime} [mtime] + * @property {number} size + * @property {number} depth + * + * @typedef {Object} Directory + * @property {'dir'} type + * @property {CID} cid + * @property {string} name + * @property {string} path - Directory path + * @property {number} [mode] + * @property {MTime} [mtime] + * @property {number} size + * @property {number} depth + * + * @typedef {File|Directory} IPFSEntry + * + * @typedef {Object} BaseFile + * @property {CID} cid + * @property {string} path + * @property {string} name + * + * @typedef {Object} InputFileExt + * @property {undefined} [unixfs] + * + * @typedef {BaseFile & InputFileExt} InputFile + * + * @typedef {Object} UnixFSeExt + * @property {() => AsyncIterable} content + * @property {UnixFS} unixfs + * + * @typedef {BaseFile & UnixFSeExt} UnixFSFile + * + * + * @typedef {Object} UnixFS + * @property {'directory'|'file'|'dir'} type + * @property {() => number} fileSize + * @property {() => AsyncIterable} content + * @property {number} mode + * @property {MTime} mtime + * + * @typedef {object} MTime + * @property {number} secs - the number of seconds since (positive) or before + * (negative) the Unix Epoch began + * @property {number} [nsecs] - the number of nanoseconds since the last full + * second. + */ + /** * @template {any[]} ARGS * @template R @@ -140,19 +204,35 @@ const mapFile = (file, options) => { * @property {AbortSignal} [signal] - Can be used to cancel any long running requests started as a result of this call */ +/** + * @typedef {Object} Mtime + * @property {number} [secs] + * @property {number} [nsecs] + */ + +/** + * @typedef {[number, number]} Hrtime + */ + +/** + * @typedef {Object} PreloadOptions + * @property {boolean} [preload=true] + */ + +/** + * @template {Record} ExtraOptions + */ + /** * @template {any[]} ARGS - * @template {Promise | AsyncIterable} R + * @template {Promise | AsyncIterable} R - The return type of `fn` * @param {Fn} fn * @param {number} [optionsArgIndex] * @returns {Fn} */ function withTimeoutOption (fn, optionsArgIndex) { - /** - * @param {...any} args - * @returns {R} - */ - const fnWithTimeout = (/** @type {ARGS} */...args) => { + // eslint-disable-next-line + return /** @returns {R} */(/** @type {ARGS} */...args) => { const options = args[optionsArgIndex == null ? 
args.length - 1 : optionsArgIndex] if (!options || !options.timeout) return fn(...args) @@ -165,7 +245,8 @@ function withTimeoutOption (fn, optionsArgIndex) { options.signal = anySignal([options.signal, controller.signal]) const fnRes = fn(...args) - const timeoutPromise = new Promise((resolve, reject) => { + // eslint-disable-next-line promise/param-names + const timeoutPromise = new Promise((_resolve, reject) => { controller.signal.addEventListener('abort', () => { reject(new TimeoutError()) }) @@ -237,8 +318,6 @@ function withTimeoutOption (fn, optionsArgIndex) { } })() } - - return fnWithTimeout } exports.normalizePath = normalizePath diff --git a/packages/ipfs-core/src/utils/tlru.js b/packages/ipfs-core/src/utils/tlru.js index bc26d10536..6f63f672a9 100644 --- a/packages/ipfs-core/src/utils/tlru.js +++ b/packages/ipfs-core/src/utils/tlru.js @@ -1,4 +1,7 @@ 'use strict' + +/** @type {typeof import('hashlru').default} */ +// @ts-ignore - hashlru has incorrect typedefs const hashlru = require('hashlru') /** @@ -7,6 +10,7 @@ const hashlru = require('hashlru') * @see https://arxiv.org/pdf/1801.00390 * @todo move this to ipfs-utils or it's own package * + * @template T * @class TLRU */ class TLRU { @@ -14,7 +18,6 @@ class TLRU { * Creates an instance of TLRU. * * @param {number} maxSize - * @memberof TLRU */ constructor (maxSize) { this.lru = hashlru(maxSize) @@ -24,8 +27,8 @@ class TLRU { * Get the value from the a key * * @param {string} key - * @returns {any} - * @memberof TLRU + * @returns {T|undefined} + * @memberof TLoRU */ get (key) { const value = this.lru.get(key) @@ -43,9 +46,9 @@ class TLRU { * Set a key value pair * * @param {string} key - * @param {any} value + * @param {T} value * @param {number} ttl - in miliseconds - * @memberof TLRU + * @returns {void} */ set (key, value, ttl) { this.lru.set(key, { value, expire: Date.now() + ttl }) @@ -56,7 +59,6 @@ class TLRU { * * @param {string} key * @returns {boolean} - * @memberof TLRU */ has (key) { const value = this.get(key) @@ -70,7 +72,6 @@ class TLRU { * Remove key * * @param {string} key - * @memberof TLRU */ remove (key) { this.lru.remove(key) diff --git a/packages/ipfs-core/tsconfig.json b/packages/ipfs-core/tsconfig.json new file mode 100644 index 0000000000..6264673867 --- /dev/null +++ b/packages/ipfs-core/tsconfig.json @@ -0,0 +1,15 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + "package.json" + ], + "references": [ + { + "path": "../ipfs-core-utils" + } + ] +} diff --git a/packages/ipfs-http-client/.aegir.js b/packages/ipfs-http-client/.aegir.js index b286427a76..1cec2c1091 100644 --- a/packages/ipfs-http-client/.aegir.js +++ b/packages/ipfs-http-client/.aegir.js @@ -16,7 +16,7 @@ const server = createServer({ let echoServer = new EchoServer() module.exports = { - bundlesize: { maxSize: '90kB' }, + bundlesize: { maxSize: '81kB' }, karma: { files: [{ pattern: 'node_modules/interface-ipfs-core/test/fixtures/**/*', diff --git a/packages/ipfs-http-client/README.md b/packages/ipfs-http-client/README.md index 77d2a09344..26836207ee 100644 --- a/packages/ipfs-http-client/README.md +++ b/packages/ipfs-http-client/README.md @@ -96,8 +96,6 @@ All core API methods take _additional_ `options` specific to the HTTP API: * `headers` - An object or [Headers](https://developer.mozilla.org/en-US/docs/Web/API/Headers) instance that can be used to set custom HTTP headers. Note that this option can also be [configured globally](#custom-headers) via the constructor options. 
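For example, a minimal sketch of passing this per-call option (the connection details and header value below are placeholders, not part of this change):

```js
const ipfsClient = require('ipfs-http-client')

async function main () {
  // placeholder connection details for a local daemon's HTTP API
  const ipfs = ipfsClient({ host: 'localhost', port: 5001, protocol: 'http' })

  // `headers` (and `timeout`) ride along with the regular options of any core method
  const identity = await ipfs.id({
    timeout: 10 * 1000,
    headers: { authorization: 'Basic aGVsbG86d29ybGQ=' } // placeholder credentials
  })

  console.log(identity.id)
}

main().catch(console.error)
```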
* `searchParams` - An object or [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams) instance that can be used to add additional query parameters to the query string sent with each request. -* `ipld.formats` - An array of additional [IPLD formats](https://github.com/ipld/interface-ipld-format) to support -* `ipld.loadFormat` an async function that takes the name of an [IPLD format](https://github.com/ipld/interface-ipld-format) as a string and should return the implementation of that codec ### Instance Utils diff --git a/packages/ipfs-http-client/package.json b/packages/ipfs-http-client/package.json index 267eb9c819..aee5887cff 100644 --- a/packages/ipfs-http-client/package.json +++ b/packages/ipfs-http-client/package.json @@ -20,6 +20,13 @@ "go-ipfs": false, "ipfs-core-utils/src/files/normalise-input": "ipfs-core-utils/src/files/normalise-input/index.browser.js" }, + "typesVersions": { + "*": { + "*": [ + "dist/*" + ] + } + }, "repository": { "type": "git", "url": "git+https://github.com/ipfs/js-ipfs.git" @@ -34,7 +41,9 @@ "test:chrome": "cross-env ECHO_SERVER_PORT=37496 aegir test -t browser -t webworker -- --browsers ChromeHeadless", "test:firefox": "cross-env ECHO_SERVER_PORT=37497 aegir test -t browser -t webworker -- --browsers FirefoxHeadless", "lint": "aegir lint", - "build": "aegir build", + "build": "npm run build:js && npm run build:types", + "build:js": "aegir build", + "build:types": "tsc --build", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rm -rf ./dist", "dep-check": "aegir dep-check" @@ -70,7 +79,7 @@ "uint8arrays": "^1.1.0" }, "devDependencies": { - "aegir": "^27.0.0", + "aegir": "^28.0.0", "cross-env": "^7.0.0", "go-ipfs": "^0.7.0", "interface-ipfs-core": "^0.140.0", @@ -78,7 +87,9 @@ "it-all": "^1.0.4", "it-concat": "^1.0.1", "it-pipe": "^1.1.0", - "nock": "^13.0.2" + "nock": "^13.0.2", + "ipfs-core": "0.0.1", + "typescript": "^4.0.3" }, "engines": { "node": ">=10.3.0", diff --git a/packages/ipfs-http-client/src/add-all.js b/packages/ipfs-http-client/src/add-all.js index 45d0f755db..1a97a3db74 100644 --- a/packages/ipfs-http-client/src/add-all.js +++ b/packages/ipfs-http-client/src/add-all.js @@ -5,14 +5,14 @@ const toCamel = require('./lib/object-to-camel') const configure = require('./lib/configure') const multipartRequest = require('./lib/multipart-request') const toUrlSearchParams = require('./lib/to-url-search-params') -const anySignal = require('any-signal') +const { anySignal } = require('any-signal') const AbortController = require('native-abort-controller') module.exports = configure((api) => { /** - * @type {import('../../ipfs/src/core/components/add-all').AddAll} + * @type {import('.').Implements} */ - async function * addAll (input, options = {}) { + async function * addAll (source, options = {}) { const progressFn = options.progress // allow aborting requests on body errors @@ -28,7 +28,7 @@ module.exports = configure((api) => { timeout: options.timeout, signal, ...( - await multipartRequest(input, controller, options.headers) + await multipartRequest(source, controller, options.headers) ) }) @@ -46,11 +46,7 @@ module.exports = configure((api) => { }) /** - * @typedef {import('../../ipfs/src/core/components/add-all').UnixFSEntry} UnixFSEntry - */ - -/** - * @param {*} input + * @param {any} input * @returns {UnixFSEntry} */ function toCoreInterface ({ name, hash, size, mode, mtime, mtimeNsecs }) { @@ -71,6 +67,9 @@ function toCoreInterface ({ name, hash, size, mode, mtime, mtimeNsecs }) { } } - // 
@ts-ignore return output } + +/** + * @typedef {import('ipfs-core/src/components/add-all/index').UnixFSEntry} UnixFSEntry + */ diff --git a/packages/ipfs-http-client/src/add.js b/packages/ipfs-http-client/src/add.js index d33e83f02b..0cdd83eaae 100644 --- a/packages/ipfs-http-client/src/add.js +++ b/packages/ipfs-http-client/src/add.js @@ -5,22 +5,17 @@ const last = require('it-last') const configure = require('./lib/configure') /** - * @typedef {import("./lib/core").ClientOptions} ClientOptions - */ - -/** - * @param {ClientOptions} options + * @param {import("./lib/core").ClientOptions} options */ module.exports = (options) => { const all = addAll(options) - return configure(() => { /** - * @type {import('../../ipfs/src/core/components/add').Add} + * @type {import('.').Implements} */ - async function add (input, options = {}) { // eslint-disable-line require-await - // @ts-ignore - return last(all(input, options)) + async function add (input, options = {}) { + // @ts-ignore - last may return undefind if source is empty + return await last(all(input, options)) } return add })(options) diff --git a/packages/ipfs-http-client/src/bitswap/stat.js b/packages/ipfs-http-client/src/bitswap/stat.js index d75b45dad3..5d3d33a894 100644 --- a/packages/ipfs-http-client/src/bitswap/stat.js +++ b/packages/ipfs-http-client/src/bitswap/stat.js @@ -7,7 +7,7 @@ const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { /** - * @type {import('../../../ipfs/src/core/components/bitswap/stat').Stat} + * @type {import('..').Implements} */ async function stat (options = {}) { const res = await api.post('bitswap/stat', { diff --git a/packages/ipfs-http-client/src/bitswap/unwant.js b/packages/ipfs-http-client/src/bitswap/unwant.js index 99731fed5e..806827acf9 100644 --- a/packages/ipfs-http-client/src/bitswap/unwant.js +++ b/packages/ipfs-http-client/src/bitswap/unwant.js @@ -6,13 +6,14 @@ const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { /** - * @type {import('../../../ipfs/src/core/components/bitswap/unwant').Unwant} + * @type {import('..').Implements} */ async function unwant (cid, options = {}) { const res = await api.post('bitswap/unwant', { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ + // @ts-ignore - CID|string seems to confuse typedef arg: typeof cid === 'string' ? cid : new CID(cid).toString(), ...options }), diff --git a/packages/ipfs-http-client/src/bitswap/wantlist-for-peer.js b/packages/ipfs-http-client/src/bitswap/wantlist-for-peer.js index 59b73212ad..973ce7c8eb 100644 --- a/packages/ipfs-http-client/src/bitswap/wantlist-for-peer.js +++ b/packages/ipfs-http-client/src/bitswap/wantlist-for-peer.js @@ -6,9 +6,10 @@ const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { /** - * @type {import('../../../ipfs/src/core/components/bitswap/wantlist-for-peer').WantlistForPeer} + * @type {import('..').Implements} */ async function wantlistForPeer (peerId, options = {}) { + // @ts-ignore - CID|string seems to confuse typedef peerId = typeof peerId === 'string' ? 
peerId : new CID(peerId).toString() const res = await (await api.post('bitswap/wantlist', { diff --git a/packages/ipfs-http-client/src/bitswap/wantlist.js b/packages/ipfs-http-client/src/bitswap/wantlist.js index 1927f0cc54..fab480887e 100644 --- a/packages/ipfs-http-client/src/bitswap/wantlist.js +++ b/packages/ipfs-http-client/src/bitswap/wantlist.js @@ -6,7 +6,7 @@ const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { /** - * @type {import('../../../ipfs/src/core/components/bitswap/wantlist').WantlistFn} + * @type {import('..').Implements} */ async function wantlist (options = {}) { const res = await (await api.post('bitswap/wantlist', { diff --git a/packages/ipfs-http-client/src/block/get.js b/packages/ipfs-http-client/src/block/get.js index 68b5cc7e39..ea29369b38 100644 --- a/packages/ipfs-http-client/src/block/get.js +++ b/packages/ipfs-http-client/src/block/get.js @@ -7,9 +7,10 @@ const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { /** - * @type {import('../../../ipfs/src/core/components/block/get').BlockGet} + * @type {import('..').Implements} */ async function get (cid, options = {}) { + // @ts-ignore - CID|string seems to confuse typedef cid = new CID(cid) const res = await api.post('block/get', { diff --git a/packages/ipfs-http-client/src/block/put.js b/packages/ipfs-http-client/src/block/put.js index 7f0351e7b4..d64b747ff9 100644 --- a/packages/ipfs-http-client/src/block/put.js +++ b/packages/ipfs-http-client/src/block/put.js @@ -6,12 +6,12 @@ const multihash = require('multihashes') const multipartRequest = require('../lib/multipart-request') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') -const anySignal = require('any-signal') +const { anySignal } = require('any-signal') const AbortController = require('native-abort-controller') module.exports = configure(api => { /** - * @type {import('../../../ipfs/src/core/components/block/put').BlockPut} + * @type {import('..').Implements} */ async function put (data, options = {}) { if (Block.isBlock(data)) { diff --git a/packages/ipfs-http-client/src/block/rm.js b/packages/ipfs-http-client/src/block/rm.js index 5f25b9fc95..66930c9458 100644 --- a/packages/ipfs-http-client/src/block/rm.js +++ b/packages/ipfs-http-client/src/block/rm.js @@ -5,7 +5,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async function * rm (cid, options = {}) { + /** + * @type {import('..').Implements} + */ + async function * rm (cid, options = {}) { if (!Array.isArray(cid)) { cid = [cid] } @@ -25,6 +28,8 @@ module.exports = configure(api => { yield toCoreInterface(removed) } } + + return rm }) function toCoreInterface (removed) { diff --git a/packages/ipfs-http-client/src/block/stat.js b/packages/ipfs-http-client/src/block/stat.js index 12a3c167b9..8ca0cd8349 100644 --- a/packages/ipfs-http-client/src/block/stat.js +++ b/packages/ipfs-http-client/src/block/stat.js @@ -5,7 +5,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async (cid, options = {}) => { + /** + * @type {import('..').Implements} + */ + async function stat (cid, options = {}) { const res = await api.post('block/stat', { timeout: options.timeout, signal: options.signal, @@ -19,4 +22,6 @@ module.exports = configure(api 
=> { return { cid: new CID(data.Key), size: data.Size } } + + return stat }) diff --git a/packages/ipfs-http-client/src/bootstrap/add.js b/packages/ipfs-http-client/src/bootstrap/add.js index 40e67c1c03..c2efcab28c 100644 --- a/packages/ipfs-http-client/src/bootstrap/add.js +++ b/packages/ipfs-http-client/src/bootstrap/add.js @@ -1,16 +1,14 @@ 'use strict' -const Multiaddr = require('multiaddr') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') +const Multiaddr = require('multiaddr') module.exports = configure(api => { - return async (addr, options = {}) => { - if (addr && typeof addr === 'object' && !Multiaddr.isMultiaddr(addr)) { - options = addr - addr = null - } - + /** + * @type {import('..').Implements} + */ + async function add (addr, options = {}) { const res = await api.post('bootstrap/add', { timeout: options.timeout, signal: options.signal, @@ -21,6 +19,10 @@ module.exports = configure(api => { headers: options.headers }) - return res.json() + const { Peers } = await res.json() + + return { Peers: Peers.map(ma => new Multiaddr(ma)) } } + + return add }) diff --git a/packages/ipfs-http-client/src/bootstrap/clear.js b/packages/ipfs-http-client/src/bootstrap/clear.js index 99d8091429..6a334847eb 100644 --- a/packages/ipfs-http-client/src/bootstrap/clear.js +++ b/packages/ipfs-http-client/src/bootstrap/clear.js @@ -2,9 +2,13 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') +const Multiaddr = require('multiaddr') module.exports = configure(api => { - return async (options = {}) => { + /** + * @type {import('..').Implements} + */ + async function clear (options = {}) { const res = await api.post('bootstrap/rm', { timeout: options.timeout, signal: options.signal, @@ -15,6 +19,10 @@ module.exports = configure(api => { headers: options.headers }) - return res.json() + const { Peers } = await res.json() + + return { Peers: Peers.map(ma => new Multiaddr(ma)) } } + + return clear }) diff --git a/packages/ipfs-http-client/src/bootstrap/list.js b/packages/ipfs-http-client/src/bootstrap/list.js index 0dc2b7cbf0..515cc2daf4 100644 --- a/packages/ipfs-http-client/src/bootstrap/list.js +++ b/packages/ipfs-http-client/src/bootstrap/list.js @@ -2,9 +2,13 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') +const Multiaddr = require('multiaddr') module.exports = configure(api => { - return async (options = {}) => { + /** + * @type {import('..').Implements} + */ + async function list (options = {}) { const res = await api.post('bootstrap/list', { timeout: options.timeout, signal: options.signal, @@ -12,6 +16,10 @@ module.exports = configure(api => { headers: options.headers }) - return res.json() + const { Peers } = await res.json() + + return { Peers: Peers.map(ma => new Multiaddr(ma)) } } + + return list }) diff --git a/packages/ipfs-http-client/src/bootstrap/reset.js b/packages/ipfs-http-client/src/bootstrap/reset.js index 9332aafc71..f037076d68 100644 --- a/packages/ipfs-http-client/src/bootstrap/reset.js +++ b/packages/ipfs-http-client/src/bootstrap/reset.js @@ -2,9 +2,13 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') +const Multiaddr = require('multiaddr') module.exports = configure(api => { - return async (options = {}) => { + /** + * @type {import('..').Implements} + */ + async function reset (options = {}) { const res = await 
api.post('bootstrap/add', { timeout: options.timeout, signal: options.signal, @@ -15,6 +19,10 @@ module.exports = configure(api => { headers: options.headers }) - return res.json() + const { Peers } = await res.json() + + return { Peers: Peers.map(ma => new Multiaddr(ma)) } } + + return reset }) diff --git a/packages/ipfs-http-client/src/bootstrap/rm.js b/packages/ipfs-http-client/src/bootstrap/rm.js index 85760b8224..89c4506d01 100644 --- a/packages/ipfs-http-client/src/bootstrap/rm.js +++ b/packages/ipfs-http-client/src/bootstrap/rm.js @@ -1,16 +1,14 @@ 'use strict' -const Multiaddr = require('multiaddr') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') +const Multiaddr = require('multiaddr') module.exports = configure(api => { - return async (addr, options = {}) => { - if (addr && typeof addr === 'object' && !Multiaddr.isMultiaddr(addr)) { - options = addr - addr = null - } - + /** + * @type {import('..').Implements} + */ + async function rm (addr, options = {}) { const res = await api.post('bootstrap/rm', { timeout: options.timeout, signal: options.signal, @@ -21,6 +19,10 @@ module.exports = configure(api => { headers: options.headers }) - return res.json() + const { Peers } = await res.json() + + return { Peers: Peers.map(ma => new Multiaddr(ma)) } } + + return rm }) diff --git a/packages/ipfs-http-client/src/cat.js b/packages/ipfs-http-client/src/cat.js index b24d46e71d..d1f3b81cf0 100644 --- a/packages/ipfs-http-client/src/cat.js +++ b/packages/ipfs-http-client/src/cat.js @@ -5,7 +5,10 @@ const configure = require('./lib/configure') const toUrlSearchParams = require('./lib/to-url-search-params') module.exports = configure(api => { - return async function * cat (path, options = {}) { + /** + * @type {import('.').Implements} + */ + async function * cat (path, options = {}) { const res = await api.post('cat', { timeout: options.timeout, signal: options.signal, @@ -18,4 +21,6 @@ module.exports = configure(api => { yield * res.iterator() } + + return cat }) diff --git a/packages/ipfs-http-client/src/config/get.js b/packages/ipfs-http-client/src/config/get.js index 3cc0e7e3cd..3b69da2679 100644 --- a/packages/ipfs-http-client/src/config/get.js +++ b/packages/ipfs-http-client/src/config/get.js @@ -4,7 +4,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async (key, options = {}) => { + /** + * @type {import('..').ImplementsMethod<'get', import('ipfs-core/src/components/config')>} + */ + const get = async (key, options = {}) => { if (!key) { throw new Error('key argument is required') } @@ -22,4 +25,6 @@ module.exports = configure(api => { return data.Value } + + return get }) diff --git a/packages/ipfs-http-client/src/config/getAll.js b/packages/ipfs-http-client/src/config/getAll.js index 0c60c5d8c4..8e74b6b89e 100644 --- a/packages/ipfs-http-client/src/config/getAll.js +++ b/packages/ipfs-http-client/src/config/getAll.js @@ -4,7 +4,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async (options = {}) => { + /** + * @type {import('..').ImplementsMethod<'getAll', import('ipfs-core/src/components/config')>} + */ + const getAll = async (options = {}) => { const res = await api.post('config/show', { timeout: options.timeout, signal: options.signal, @@ -17,4 +20,6 @@ module.exports = configure(api => { return 
data } + + return getAll }) diff --git a/packages/ipfs-http-client/src/config/profiles/apply.js b/packages/ipfs-http-client/src/config/profiles/apply.js index 6b8d34dfec..cf44f0f397 100644 --- a/packages/ipfs-http-client/src/config/profiles/apply.js +++ b/packages/ipfs-http-client/src/config/profiles/apply.js @@ -4,7 +4,7 @@ const configure = require('../../lib/configure') const toUrlSearchParams = require('../../lib/to-url-search-params') module.exports = configure(api => { - return async (profile, options = {}) => { + async function apply (profile, options = {}) { const res = await api.post('config/profile/apply', { timeout: options.timeout, signal: options.signal, @@ -20,4 +20,6 @@ module.exports = configure(api => { original: data.OldCfg, updated: data.NewCfg } } + + return apply }) diff --git a/packages/ipfs-http-client/src/config/replace.js b/packages/ipfs-http-client/src/config/replace.js index ee6316404a..cb00bbefbf 100644 --- a/packages/ipfs-http-client/src/config/replace.js +++ b/packages/ipfs-http-client/src/config/replace.js @@ -4,11 +4,14 @@ const uint8ArrayFromString = require('uint8arrays/from-string') const multipartRequest = require('../lib/multipart-request') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') -const anySignal = require('any-signal') +const { anySignal } = require('any-signal') const AbortController = require('native-abort-controller') module.exports = configure(api => { - return async (config, options = {}) => { + /** + * @type {import('..').ImplementsMethod<'replace', import('ipfs-core/src/components/config')>} + */ + const replace = async (config, options = {}) => { // allow aborting requests on body errors const controller = new AbortController() const signal = anySignal([controller.signal, options.signal]) @@ -24,4 +27,6 @@ module.exports = configure(api => { return res.text() } + + return replace }) diff --git a/packages/ipfs-http-client/src/config/set.js b/packages/ipfs-http-client/src/config/set.js index 4388c8c94a..9a4f68aa41 100644 --- a/packages/ipfs-http-client/src/config/set.js +++ b/packages/ipfs-http-client/src/config/set.js @@ -5,25 +5,17 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async (key, value, options = {}) => { + /** + * @type {import('..').ImplementsMethod<'set', import('ipfs-core/src/components/config')>} + */ + const set = async (key, value, options = {}) => { if (typeof key !== 'string') { throw new Error('Invalid key type') } const params = { - arg: [ - key, - value - ], - ...options - } - - if (typeof value === 'boolean') { - params.arg[1] = value.toString() - params.bool = true - } else if (typeof value !== 'string') { - params.arg[1] = JSON.stringify(value) - params.json = true + ...options, + ...encodeParam(key, value) } const res = await api.post('config', { @@ -35,4 +27,17 @@ module.exports = configure(api => { return toCamel(await res.json()) } + + return set }) + +const encodeParam = (key, value) => { + switch (typeof value) { + case 'boolean': + return { arg: [key, value.toString()], bool: true } + case 'string': + return { arg: [key, value] } + default: + return { arg: [key, JSON.stringify(value)], json: true } + } +} diff --git a/packages/ipfs-http-client/src/dag/get.js b/packages/ipfs-http-client/src/dag/get.js index 1094b97377..b3f1b3a402 100644 --- a/packages/ipfs-http-client/src/dag/get.js +++ b/packages/ipfs-http-client/src/dag/get.js @@ 
-15,7 +15,10 @@ module.exports = configure((api, options) => { const getBlock = require('../block/get')(options) const dagResolve = require('./resolve')(options) - return async (cid, options = {}) => { + /** + * @type {import('..').Implements} + */ + const get = async (cid, options = {}) => { const resolved = await dagResolve(cid, options) const block = await getBlock(resolved.cid, options) const dagResolver = resolvers[resolved.cid.codec] @@ -27,10 +30,12 @@ module.exports = configure((api, options) => { ) } - if (resolved.cid.codec === 'raw' && !resolved.remPath) { + if (resolved.cid.codec === 'raw' && !resolved.remainderPath) { resolved.remainderPath = '/' } return dagResolver.resolve(block.data, resolved.remainderPath) } + + return get }) diff --git a/packages/ipfs-http-client/src/dag/put.js b/packages/ipfs-http-client/src/dag/put.js index dfdd53cace..dad5ccc231 100644 --- a/packages/ipfs-http-client/src/dag/put.js +++ b/packages/ipfs-http-client/src/dag/put.js @@ -8,7 +8,7 @@ const multihash = require('multihashes') const configure = require('../lib/configure') const multipartRequest = require('../lib/multipart-request') const toUrlSearchParams = require('../lib/to-url-search-params') -const anySignal = require('any-signal') +const { anySignal } = require('any-signal') const AbortController = require('native-abort-controller') const multicodec = require('multicodec') @@ -25,7 +25,10 @@ module.exports = configure((api, opts) => { formats[format.codec] = format }) - return async (dagNode, options = {}) => { + /** + * @type {import('..').Implements} + */ + const put = async (dagNode, options = {}) => { if (options.cid && (options.format || options.hashAlg)) { throw new Error('Failed to put DAG node. Provide either `cid` OR `format` and `hashAlg` options') } else if ((options.format && !options.hashAlg) || (!options.format && options.hashAlg)) { @@ -42,19 +45,20 @@ module.exports = configure((api, opts) => { delete options.cid } - options = { + const settings = { format: 'dag-cbor', hashAlg: 'sha2-256', inputEnc: 'raw', ...options } - const number = multicodec.getNumber(options.format) + const number = multicodec.getNumber(settings.format) let format = formats[number] if (!format) { if (opts && opts.ipld && opts.ipld.loadFormat) { - format = await opts.ipld.loadFormat(options.format) + // @ts-ignore - loadFormat expect string but it could be a number + format = await opts.ipld.loadFormat(settings.format) } if (!format) { @@ -70,18 +74,20 @@ module.exports = configure((api, opts) => { // allow aborting requests on body errors const controller = new AbortController() - const signal = anySignal([controller.signal, options.signal]) + const signal = anySignal([controller.signal, settings.signal]) const res = await api.post('dag/put', { - timeout: options.timeout, + timeout: settings.timeout, signal, - searchParams: toUrlSearchParams(options), + searchParams: toUrlSearchParams(settings), ...( - await multipartRequest(serialized, controller, options.headers) + await multipartRequest(serialized, controller, settings.headers) ) }) const data = await res.json() return new CID(data.Cid['/']) } + + return put }) diff --git a/packages/ipfs-http-client/src/dag/resolve.js b/packages/ipfs-http-client/src/dag/resolve.js index 3741139e6a..b0e2a56ce1 100644 --- a/packages/ipfs-http-client/src/dag/resolve.js +++ b/packages/ipfs-http-client/src/dag/resolve.js @@ -5,7 +5,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = 
configure(api => { - return async (ipfsPath, options = {}) => { + /** + * @type {import('..').Implements} + */ + const resolve = async (ipfsPath, options = {}) => { const res = await api.post('dag/resolve', { timeout: options.timeout, signal: options.signal, @@ -20,4 +23,6 @@ module.exports = configure(api => { return { cid: new CID(data.Cid['/']), remainderPath: data.RemPath } } + + return resolve }) diff --git a/packages/ipfs-http-client/src/dht/find-peer.js b/packages/ipfs-http-client/src/dht/find-peer.js index 40985143f3..c35c1da8b8 100644 --- a/packages/ipfs-http-client/src/dht/find-peer.js +++ b/packages/ipfs-http-client/src/dht/find-peer.js @@ -7,7 +7,10 @@ const toUrlSearchParams = require('../lib/to-url-search-params') const { FinalPeer } = require('./response-types') module.exports = configure(api => { - return async function findPeer (peerId, options = {}) { + /** + * @type {import('..').ImplementsMethod<'findPeer', import('ipfs-core/src/components/dht')>} + */ + async function findPeer (peerId, options = {}) { const res = await api.post('dht/findpeer', { timeout: options.timeout, signal: options.signal, @@ -30,4 +33,6 @@ module.exports = configure(api => { throw new Error('not found') } + + return findPeer }) diff --git a/packages/ipfs-http-client/src/dht/find-provs.js b/packages/ipfs-http-client/src/dht/find-provs.js index 77876f0a40..64e6a60b9e 100644 --- a/packages/ipfs-http-client/src/dht/find-provs.js +++ b/packages/ipfs-http-client/src/dht/find-provs.js @@ -7,7 +7,10 @@ const toUrlSearchParams = require('../lib/to-url-search-params') const { Provider } = require('./response-types') module.exports = configure(api => { - return async function * findProvs (cid, options = {}) { + /** + * @type {import('..').ImplementsMethod<'findProvs', import('ipfs-core/src/components/dht')>} + */ + async function * findProvs (cid, options = {}) { const res = await api.post('dht/findprovs', { timeout: options.timeout, signal: options.signal, @@ -29,4 +32,6 @@ module.exports = configure(api => { } } } + + return findProvs }) diff --git a/packages/ipfs-http-client/src/dht/get.js b/packages/ipfs-http-client/src/dht/get.js index 30ec158f97..990edb1790 100644 --- a/packages/ipfs-http-client/src/dht/get.js +++ b/packages/ipfs-http-client/src/dht/get.js @@ -7,7 +7,10 @@ const uint8ArrayToString = require('uint8arrays/to-string') const uint8ArrayFromString = require('uint8arrays/from-string') module.exports = configure(api => { - return async function get (key, options = {}) { + /** + * @type {import('..').ImplementsMethod<'get', import('ipfs-core/src/components/dht')>} + */ + async function get (key, options = {}) { const res = await api.post('dht/get', { timeout: options.timeout, signal: options.signal, @@ -26,4 +29,6 @@ module.exports = configure(api => { throw new Error('not found') } + + return get }) diff --git a/packages/ipfs-http-client/src/dht/provide.js b/packages/ipfs-http-client/src/dht/provide.js index 3105a6f0ea..70cd14b558 100644 --- a/packages/ipfs-http-client/src/dht/provide.js +++ b/packages/ipfs-http-client/src/dht/provide.js @@ -7,7 +7,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async function * provide (cids, options = {}) { + /** + * @type {import('..').ImplementsMethod<'provide', import('ipfs-core/src/components/dht')>} + */ + async function * provide (cids, options = {}) { cids = Array.isArray(cids) ? 
cids : [cids] const res = await api.post('dht/provide', { @@ -34,4 +37,6 @@ module.exports = configure(api => { yield message } } + + return provide }) diff --git a/packages/ipfs-http-client/src/dht/put.js b/packages/ipfs-http-client/src/dht/put.js index 7110fc2bd2..aeee8e0afd 100644 --- a/packages/ipfs-http-client/src/dht/put.js +++ b/packages/ipfs-http-client/src/dht/put.js @@ -6,18 +6,26 @@ const toCamel = require('../lib/object-to-camel') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') const multipartRequest = require('../lib/multipart-request') - +const { anySignal } = require('any-signal') +const AbortController = require('native-abort-controller') module.exports = configure(api => { - return async function * put (key, value, options = {}) { + /** + * @type {import('..').ImplementsMethod<'put', import('ipfs-core/src/components/dht')>} + */ + async function * put (key, value, options = {}) { + // allow aborting requests on body errors + const controller = new AbortController() + const signal = anySignal([controller.signal, options.signal]) + const res = await api.post('dht/put', { timeout: options.timeout, - signal: options.signal, + signal, searchParams: toUrlSearchParams({ arg: key, ...options }), ...( - await multipartRequest(value, options.headers) + await multipartRequest(value, controller, options.headers) ) }) @@ -33,4 +41,6 @@ module.exports = configure(api => { yield message } } + + return put }) diff --git a/packages/ipfs-http-client/src/dht/query.js b/packages/ipfs-http-client/src/dht/query.js index 7c77148d9d..7183f74d16 100644 --- a/packages/ipfs-http-client/src/dht/query.js +++ b/packages/ipfs-http-client/src/dht/query.js @@ -7,12 +7,15 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async function * query (peerId, options = {}) { + /** + * @type {import('..').ImplementsMethod<'query', import('ipfs-core/src/components/dht')>} + */ + async function * query (peerId, options = {}) { const res = await api.post('dht/query', { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ - arg: new CID(peerId), + arg: new CID(`${peerId}`), ...options }), headers: options.headers @@ -28,4 +31,6 @@ module.exports = configure(api => { yield message } } + + return query }) diff --git a/packages/ipfs-http-client/src/dns.js b/packages/ipfs-http-client/src/dns.js index d91368a532..2786cc42ee 100644 --- a/packages/ipfs-http-client/src/dns.js +++ b/packages/ipfs-http-client/src/dns.js @@ -4,7 +4,10 @@ const configure = require('./lib/configure') const toUrlSearchParams = require('./lib/to-url-search-params') module.exports = configure(api => { - return async (domain, options = {}) => { + /** + * @type {import('.').Implements} + */ + const dns = async (domain, options = {}) => { const res = await api.post('dns', { timeout: options.timeout, signal: options.signal, @@ -18,4 +21,6 @@ module.exports = configure(api => { return data.Path } + + return dns }) diff --git a/packages/ipfs-http-client/src/files/chmod.js b/packages/ipfs-http-client/src/files/chmod.js index a6853e5d41..0a40a46f79 100644 --- a/packages/ipfs-http-client/src/files/chmod.js +++ b/packages/ipfs-http-client/src/files/chmod.js @@ -4,7 +4,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async function chmod (path, mode, 
options = {}) { + /** + * @type {import('..').Implements} + */ + async function chmod (path, mode, options = {}) { const res = await api.post('files/chmod', { timeout: options.timeout, signal: options.signal, @@ -18,4 +21,6 @@ module.exports = configure(api => { await res.text() } + + return chmod }) diff --git a/packages/ipfs-http-client/src/files/cp.js b/packages/ipfs-http-client/src/files/cp.js index 39449f60fa..0513071644 100644 --- a/packages/ipfs-http-client/src/files/cp.js +++ b/packages/ipfs-http-client/src/files/cp.js @@ -6,7 +6,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async (...args) => { + /** + * @type {import('..').Implements} + */ + async function cp (...args) { const { sources, options } = findSources(args) const res = await api.post('files/cp', { @@ -21,4 +24,6 @@ module.exports = configure(api => { await res.text() } + + return cp }) diff --git a/packages/ipfs-http-client/src/files/flush.js b/packages/ipfs-http-client/src/files/flush.js index d9934be9ad..344e87874f 100644 --- a/packages/ipfs-http-client/src/files/flush.js +++ b/packages/ipfs-http-client/src/files/flush.js @@ -5,7 +5,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async (path, options = {}) => { + /** + * @type {import('..').Implements} + */ + async function flush (path, options = {}) { if (!path || typeof path !== 'string') { throw new Error('ipfs.files.flush requires a path') } @@ -23,4 +26,6 @@ module.exports = configure(api => { return new CID(data.Cid) } + + return flush }) diff --git a/packages/ipfs-http-client/src/files/ls.js b/packages/ipfs-http-client/src/files/ls.js index 2420955e74..77bd3b0a30 100644 --- a/packages/ipfs-http-client/src/files/ls.js +++ b/packages/ipfs-http-client/src/files/ls.js @@ -6,7 +6,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async function * ls (path, options = {}) { + /** + * @type {import('..').Implements} + */ + async function * ls (path, options = {}) { if (!path || typeof path !== 'string') { throw new Error('ipfs.files.ls requires a path') } @@ -35,6 +38,8 @@ module.exports = configure(api => { } } } + + return ls }) function toCoreInterface (entry) { diff --git a/packages/ipfs-http-client/src/files/mkdir.js b/packages/ipfs-http-client/src/files/mkdir.js index 532add9d8c..0fe84790e4 100644 --- a/packages/ipfs-http-client/src/files/mkdir.js +++ b/packages/ipfs-http-client/src/files/mkdir.js @@ -4,7 +4,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async (path, options = {}) => { + /** + * @type {import('..').Implements} + */ + async function mkdir (path, options = {}) { const res = await api.post('files/mkdir', { timeout: options.timeout, signal: options.signal, @@ -17,4 +20,6 @@ module.exports = configure(api => { await res.text() } + + return mkdir }) diff --git a/packages/ipfs-http-client/src/files/mv.js b/packages/ipfs-http-client/src/files/mv.js index db9d648c7a..ad717590b3 100644 --- a/packages/ipfs-http-client/src/files/mv.js +++ b/packages/ipfs-http-client/src/files/mv.js @@ -6,7 +6,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') 
module.exports = configure(api => { - return async (...args) => { + /** + * @type {import('..').Implements} + */ + async function mv (...args) { const { sources, options } = findSources(args) const res = await api.post('files/mv', { @@ -21,4 +24,6 @@ module.exports = configure(api => { await res.text() } + + return mv }) diff --git a/packages/ipfs-http-client/src/files/read.js b/packages/ipfs-http-client/src/files/read.js index fb1fc048a1..10e9399e71 100644 --- a/packages/ipfs-http-client/src/files/read.js +++ b/packages/ipfs-http-client/src/files/read.js @@ -5,13 +5,16 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async function * read (path, options = {}) { + /** + * @type {import('..').Implements} + */ + async function * read (path, options = {}) { const res = await api.post('files/read', { timeout: options.timeout, signal: options.signal, searchParams: toUrlSearchParams({ arg: path, - count: options.count || options.length, + count: options.length, ...options }), headers: options.headers @@ -19,4 +22,6 @@ module.exports = configure(api => { yield * toIterable(res.body) } + + return read }) diff --git a/packages/ipfs-http-client/src/files/rm.js b/packages/ipfs-http-client/src/files/rm.js index b07e5a3936..d8eba8db39 100644 --- a/packages/ipfs-http-client/src/files/rm.js +++ b/packages/ipfs-http-client/src/files/rm.js @@ -5,7 +5,10 @@ const { findSources } = require('./utils') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async (...args) => { + /** + * @type {import('..').Implements} + */ + async function rm (...args) { const { sources, options } = findSources(args) const res = await api.post('files/rm', { @@ -20,4 +23,6 @@ module.exports = configure(api => { await res.text() } + + return rm }) diff --git a/packages/ipfs-http-client/src/files/stat.js b/packages/ipfs-http-client/src/files/stat.js index 09cec42e07..6df5e31c62 100644 --- a/packages/ipfs-http-client/src/files/stat.js +++ b/packages/ipfs-http-client/src/files/stat.js @@ -6,7 +6,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async (path, options = {}) => { + /** + * @type {import('..').Implements} + */ + async function stat (path, options = {}) { if (typeof path !== 'string') { options = path || {} path = '/' @@ -26,6 +29,8 @@ module.exports = configure(api => { data.WithLocality = data.WithLocality || false return toCoreInterface(toCamelWithMetadata(data)) } + + return stat }) function toCoreInterface (entry) { diff --git a/packages/ipfs-http-client/src/files/touch.js b/packages/ipfs-http-client/src/files/touch.js index 47fa870894..743c3230c1 100644 --- a/packages/ipfs-http-client/src/files/touch.js +++ b/packages/ipfs-http-client/src/files/touch.js @@ -4,7 +4,10 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') module.exports = configure(api => { - return async function touch (path, options = {}) { + /** + * @type {import('..').Implements} + */ + async function touch (path, options = {}) { const res = await api.post('files/touch', { timeout: options.timeout, signal: options.signal, @@ -17,4 +20,6 @@ module.exports = configure(api => { await res.text() } + + return touch }) diff --git a/packages/ipfs-http-client/src/files/write.js b/packages/ipfs-http-client/src/files/write.js 
index f4fe9dc0c3..08b598397a 100644 --- a/packages/ipfs-http-client/src/files/write.js +++ b/packages/ipfs-http-client/src/files/write.js @@ -5,11 +5,14 @@ const mtimeToObject = require('../lib/mtime-to-object') const configure = require('../lib/configure') const multipartRequest = require('../lib/multipart-request') const toUrlSearchParams = require('../lib/to-url-search-params') -const anySignal = require('any-signal') +const { anySignal } = require('any-signal') const AbortController = require('native-abort-controller') module.exports = configure(api => { - return async (path, input, options = {}) => { + /** + * @type {import('..').Implements} + */ + async function write (path, input, options = {}) { // allow aborting requests on body errors const controller = new AbortController() const signal = anySignal([controller.signal, options.signal]) @@ -20,7 +23,7 @@ module.exports = configure(api => { searchParams: toUrlSearchParams({ arg: path, streamChannels: true, - count: options.count || options.length, + count: options.length, ...options }), ...( @@ -35,4 +38,6 @@ module.exports = configure(api => { await res.text() } + + return write }) diff --git a/packages/ipfs-http-client/src/get.js b/packages/ipfs-http-client/src/get.js index 387d5fd797..8ceaa14863 100644 --- a/packages/ipfs-http-client/src/get.js +++ b/packages/ipfs-http-client/src/get.js @@ -7,7 +7,10 @@ const toUrlSearchParams = require('./lib/to-url-search-params') const map = require('it-map') module.exports = configure(api => { - return async function * get (path, options = {}) { + /** + * @type {import('.').Implements} + */ + async function * get (path, options = {}) { const res = await api.post('get', { timeout: options.timeout, signal: options.signal, @@ -22,15 +25,23 @@ module.exports = configure(api => { for await (const { header, body } of extractor(res.iterator())) { if (header.type === 'directory') { + // @ts-ignore - Missing the following properties from type 'Directory': + // cid, name, size, depthts yield { + type: 'dir', path: header.name } } else { + // @ts-ignore - Missing the following properties from type 'File': + // cid, name, size, depthts yield { + type: 'file', path: header.name, content: map(body, (chunk) => chunk.slice()) // convert bl to Buffer/Uint8Array } } } } + + return get }) diff --git a/packages/ipfs-http-client/src/id.js b/packages/ipfs-http-client/src/id.js index f5a6eccd71..89d1d95641 100644 --- a/packages/ipfs-http-client/src/id.js +++ b/packages/ipfs-http-client/src/id.js @@ -7,7 +7,7 @@ const toUrlSearchParams = require('./lib/to-url-search-params') module.exports = configure(api => { /** - * @type {import('../../ipfs/src/core/components/id').Id} + * @type {import('.').Implements} */ async function id (options = {}) { const res = await api.post('id', { diff --git a/packages/ipfs-http-client/src/index.js b/packages/ipfs-http-client/src/index.js index ce8fb72a8c..65e7d7c482 100644 --- a/packages/ipfs-http-client/src/index.js +++ b/packages/ipfs-http-client/src/index.js @@ -10,20 +10,7 @@ const globSource = require('ipfs-utils/src/files/glob-source') const urlSource = require('ipfs-utils/src/files/url-source') /** - * @typedef { import("./lib/core").ClientOptions } ClientOptions - */ - -/** - * @typedef {object} HttpOptions - * @property {Headers | Record} [headers] - An object or [Headers](https://developer.mozilla.org/en-US/docs/Web/API/Headers) instance that can be used to set custom HTTP headers. 
Note that this option can also be [configured globally](#custom-headers) via the constructor options. - * @property {URLSearchParams | Record} [searchParams] - An object or [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams) instance that can be used to add additional query parameters to the query string sent with each request. - * @property {object} [ipld] - * @property {any[]} [ipld.formats] - An array of additional [IPLD formats](https://github.com/ipld/interface-ipld-format) to support - * @property {(format: string) => Promise} [ipld.loadFormat] - an async function that takes the name of an [IPLD format](https://github.com/ipld/interface-ipld-format) as a string and should return the implementation of that codec - */ - -/** - * @param {ClientOptions} options + * @param {import("./lib/core").ClientOptions} options */ function ipfsClient (options = {}) { return { @@ -66,3 +53,40 @@ function ipfsClient (options = {}) { Object.assign(ipfsClient, { CID, multiaddr, multibase, multicodec, multihash, globSource, urlSource }) module.exports = ipfsClient + +/** + * @typedef {Object} HttpOptions + * @property {Headers | Record<string, string>} [headers] - An object or [Headers](https://developer.mozilla.org/en-US/docs/Web/API/Headers) instance that can be used to set custom HTTP headers. Note that this option can also be [configured globally](#custom-headers) via the constructor options. + * @property {URLSearchParams | Record<string, string>} [searchParams] - An object or [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams) instance that can be used to add additional query parameters to the query string sent with each request. + * + * @typedef {import('ipfs-core/src/utils').AbortOptions} AbortOptions + */ + +/** + * This is a utility type that can be used to derive the type of the HTTP Client + * API from the Core API. It takes the type of the API factory (from ipfs-core), + * derives the API from its return type and extends its last `options` parameter + * with `HttpOptions`. + * + * This can be used to avoid (re)typing the API interface when implementing it in + * the http client, e.g. you can annotate the `ipfs.addAll` implementation with + * + * `@type {Implements}` + * + * **Caution**: This supports APIs with up to four parameters and a last optional + * `options` parameter, anything else will result in the `never` type. + * + * @template {(config:any) => any} APIFactory + * @typedef {APIWithExtraOptions<ReturnType<APIFactory>, HttpOptions>} Implements + */ + +/** + * @template Key + * @template {(config:any) => any} APIFactory + * @typedef {import('./interface').APIMethadWithExtraOptions<ReturnType<APIFactory>, Key, HttpOptions>} ImplementsMethod + */ + +/** + * @template API, Extra + * @typedef {import('./interface').APIWithExtraOptions<API, Extra>} APIWithExtraOptions + */
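For reference, a minimal sketch of how the `Implements` helper above is meant to be used when annotating a client method, mirroring the `files/mv.js`, `files/read.js` and similar changes earlier in this diff. The `cat` command and the `ipfs-core/src/components/cat` module path are assumptions for illustration only, not part of this change:

```js
'use strict'

const configure = require('./lib/configure')
const toUrlSearchParams = require('./lib/to-url-search-params')

module.exports = configure(api => {
  /**
   * Deriving the signature from the core implementation means the client only
   * keeps the runtime behaviour in sync, not a second copy of the typings.
   *
   * @type {import('.').Implements<typeof import('ipfs-core/src/components/cat')>}
   */
  async function * cat (path, options = {}) {
    const res = await api.post('cat', {
      timeout: options.timeout,
      signal: options.signal,
      searchParams: toUrlSearchParams({ arg: path, ...options }),
      headers: options.headers
    })

    yield * res.iterator()
  }

  return cat
})
```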
diff --git a/packages/ipfs-http-client/src/interface.ts b/packages/ipfs-http-client/src/interface.ts new file mode 100644 index 0000000000..34b698cc9f --- /dev/null +++ b/packages/ipfs-http-client/src/interface.ts @@ -0,0 +1,58 @@ +// This file contains some utility types that either can't be expressed in +// JSDoc syntax or that result in a different behavior when typed in JSDoc. + +/** + * Utility type that takes an IPFS Core API function type (with 0 to 4 arguments + * & a last **optional** `options` parameter) and derives a function type with + * the `options` parameter extended with the given `Extra` options. + * + * **Caution**: API functions with more than four arguments ahead of `options` + * will result in the `never` type. An API function that does not take `options` will + * result in a function whose last argument is extended with `Extra`, which would + * be an error. + */ +// This is typed in a TS file because otherwise TS unifies on the first parameter +// regardless of the number of parameters the function has. +export type APIWithExtraOptions<API extends (...args: any[]) => any, Extra> = + (...args: WithExtendedOptions<Parameters<API>, Extra>) => ReturnType<API> + +type End = never[] +type WithExtendedOptions<Params, Ext> = Params extends [...End] + ? [] + // (options?: Options) -> (options?: Options & Ext) + : Params extends [options?: infer Options, ...end: End] + ? [options?: Options & Ext] + // (a1: A1, options?: Options) -> (a1: A1, options?: Options & Ext) + : Params extends [a1: infer A1, options?: infer Options, ...end: End] + ? [a1: A1, options?: Options & Ext] + // (a1?: A1, options?: Options) -> (a1?: A1, options?: Options & Ext) + : Params extends [a1?: infer A1, options?: infer Options, ...end: End] + ? [a1?: A1, options?: Options & Ext] + // (a1: A1, a2: A2, options?: Options) -> (a1: A1, a2: A2, options?: Options & Ext) + : Params extends [a1: infer A1, a2: infer A2, options?: infer Options, ...end: End] + ? [a1: A1, a2: A2, options?: Options & Ext] + // (a1: A1, a2?: A2, options?: Options) -> (a1: A1, a2?: A2, options?: Options & Ext) + : Params extends [a1: infer A1, a2?: infer A2, options?: infer Options, ...end: End] + ? [a1: A1, a2?: A2, options?: Options & Ext] + // (a1?: A1, a2?: A2, options?: Options) -> (a1?: A1, a2?: A2, options?: Options & Ext) + : Params extends [a1?: infer A1, a2?: infer A2, options?: infer Options, ...end: End] + ? [a1?: A1, a2?: A2, options?: Options & Ext] + // (a1: A1, a2: A2, a3: A3, options?: Options) -> (a1: A1, a2: A2, a3: A3, options?: Options & Ext) + : Params extends [a1: infer A1, a2: infer A2, a3:infer A3, options?: infer Options, ...end: End] + ? [a1: A1, a2: A2, a3: A3, options?: Options & Ext] + // (a1: A1, a2: A2, a3?: A3, options?: Options) -> (a1: A1, a2: A2, a3?: A3, options?: Options & Ext) + : Params extends [a1: infer A1, a2:infer A2, a3?: infer A3, options?: infer Options, ...end: End] + ? [a1: A1, a2: A2, a3?: A3, options?: Options & Ext] + // (a1: A1, a2?: A2, a3?: A3, options?: Options) -> (a1: A1, a2?: A2, a3?: A3, options?: Options & Ext) + : Params extends [a1: infer A1, a2?: infer A2, a3?: infer A3, options?: infer Options, ...end: End] + ? [a1: A1, a2?: A2, a3?: A3, options?: Options & Ext] + // (a1?: A1, a2?: A2, a3?: A3, options?: Options) -> (a1?: A1, a2?: A2, a3?: A3, options?: Options & Ext) + : Params extends [a1?: infer A1, a2?: infer A2, a3?: infer A3, options?: infer Options, ...end: End] + ? [a1?: A1, a2?: A2, a3?: A3, options?: Options & Ext] + : never + +export type APIMethadWithExtraOptions < + API, + Key extends keyof API, + Extra + > = API[Key] extends (...args: any[]) => any ? APIWithExtraOptions<API[Key], Extra> : never
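To make the effect of `APIWithExtraOptions` concrete, here is a small TypeScript sketch. The `CoreCat` signature and both option shapes are invented for illustration; only the import of `APIWithExtraOptions` from the new `interface.ts` is taken from this change:

```ts
import type { APIWithExtraOptions } from './interface'

// A hypothetical core API signature: one required argument plus an optional options bag.
interface CatOptions { offset?: number; length?: number }
type CoreCat = (path: string, options?: CatOptions) => AsyncIterable<Uint8Array>

// A stand-in for the HttpOptions typedef declared in src/index.js.
interface HttpOptions { headers?: Record<string, string> }

// The derived type keeps the same arguments but lets `options` also carry the
// extra HTTP options, i.e. it is equivalent to:
// (path: string, options?: CatOptions & HttpOptions) => AsyncIterable<Uint8Array>
type HttpCat = APIWithExtraOptions<CoreCat, HttpOptions>

// Compile-time check that the derived signature accepts both option shapes.
const cat: HttpCat = async function * (path, options = {}) {
  void path
  void options.offset // original core option is still present
  void options.headers // extra HTTP option is now allowed
  yield new Uint8Array()
}
void cat
```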
diff --git a/packages/ipfs-http-client/src/lib/core.js b/packages/ipfs-http-client/src/lib/core.js index f41f2515bc..44f1774bae 100644 --- a/packages/ipfs-http-client/src/lib/core.js +++ b/packages/ipfs-http-client/src/lib/core.js @@ -9,6 +9,10 @@ const log = require('debug')('ipfs-http-client:lib:error-handler') const HTTP = require('ipfs-utils/src/http') const merge = require('merge-options') +/** + * @param {any} input + * @returns {input is Multiaddr} + */ const isMultiaddr = (input) => { try { Multiaddr(input) // eslint-disable-line no-new @@ -18,6 +22,10 @@ const isMultiaddr = (input) => { } } +/** + * @param {any} options + * @returns {ClientOptions} + */ const normalizeInput = (options = {}) => { if (isMultiaddr(options)) { options = { url: toUri(options) } @@ -104,15 +112,15 @@ const parseTimeout = (value) => { * @property {number|string} [timeout] - Amount of time until request should timeout in ms or human readable. https://www.npmjs.com/package/parse-duration for valid string values. * @property {string} [apiPath] - Path to the API. * @property {URL|string} [url] - Full API URL. + * @property {object} [ipld] + * @property {any[]} [ipld.formats] - An array of additional [IPLD formats](https://github.com/ipld/interface-ipld-format) to support + * @property {(format: string) => Promise<any>} [ipld.loadFormat] - an async function that takes the name of an [IPLD format](https://github.com/ipld/interface-ipld-format) as a string and should return the implementation of that codec */ - class Client extends HTTP { /** - * - * @param {ClientOptions|URL|Multiaddr|string} options + * @param {ClientOptions|URL|Multiaddr|string} [options] */ constructor (options = {}) { - /** @type {ClientOptions} */ const opts = normalizeInput(options) super({ timeout: parseTimeout(opts.timeout) || 60000 * 20,
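Since `ClientOptions` now also documents the `ipld` options, a rough usage sketch may help. All concrete values below (API address, auth header, codec name) are illustrative only and not part of this change:

```js
const ipfsClient = require('ipfs-http-client')

// A sketch of the options described by the ClientOptions typedef above.
const ipfs = ipfsClient({
  url: 'http://127.0.0.1:5001/api/v0', // or a multiaddr / URL instance
  timeout: '2m', // parse-duration style string or milliseconds
  headers: { authorization: 'Bearer TOKEN' }, // sent with every request
  ipld: {
    // lazily provide additional IPLD codec implementations by name
    loadFormat: async (format) => {
      if (format === 'dag-pb') {
        return require('ipld-dag-pb')
      }

      throw new Error(`unsupported IPLD format: ${format}`)
    }
  }
})

ipfs.version().then(res => console.log(res.version))
```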
diff --git a/packages/ipfs-http-client/src/lib/mtime-to-object.js b/packages/ipfs-http-client/src/lib/mtime-to-object.js index be89148f64..f7c4d3dbc9 100644 --- a/packages/ipfs-http-client/src/lib/mtime-to-object.js +++ b/packages/ipfs-http-client/src/lib/mtime-to-object.js @@ -1,5 +1,9 @@ 'use strict' +/** + * @param {any} mtime + * @returns {{secs:number, nsecs:number}|undefined} + */ module.exports = function parseMtime (mtime) { if (mtime == null) { return undefined diff --git a/packages/ipfs-http-client/src/lib/multipart-request.browser.js b/packages/ipfs-http-client/src/lib/multipart-request.browser.js index 5c2bc3e411..9b75e9d829 100644 --- a/packages/ipfs-http-client/src/lib/multipart-request.browser.js +++ b/packages/ipfs-http-client/src/lib/multipart-request.browser.js @@ -24,10 +24,9 @@ async function multipartRequest (source = '', abortController, headers = {}) { qs.push(`mode=${modeToString(mode)}`) } - if (mtime != null) { - const { - secs, nsecs - } = mtimeToObject(mtime) + const time = mtimeToObject(mtime) + if (time != null) { + const { secs, nsecs } = time qs.push(`mtime=${secs}`) diff --git a/packages/ipfs-http-client/src/lib/multipart-request.js b/packages/ipfs-http-client/src/lib/multipart-request.js index fdde2248bd..e7db72413b 100644 --- a/packages/ipfs-http-client/src/lib/multipart-request.js +++ b/packages/ipfs-http-client/src/lib/multipart-request.js @@ -1,6 +1,6 @@ 'use strict' -const normaliseInput = require('ipfs-core-utils/src/files/normalise-input') +const normaliseInput = require('ipfs-core-utils/src/files/normalise-input/index') const { nanoid } = require('nanoid') const modeToString = require('../lib/mode-to-string') const mtimeToObject = require('../lib/mtime-to-object') @@ -8,6 +8,13 @@ const merge = require('merge-options').bind({ ignoreUndefined: true }) const toStream = require('it-to-stream') const { isElectronRenderer } = require('ipfs-utils/src/env') +/** + * + * @param {Object} source + * @param {AbortController} abortController + * @param {Headers|Record<string, string>} [headers] + * @param {string} [boundary] + */ async function multipartRequest (source = '', abortController, headers = {}, boundary = `-----------------------------${nanoid()}`) { async function * streamFiles (source) { try { @@ -30,10 +37,9 @@ async function multipartRequest (source = '', abortController, headers = {}, bou qs.push(`mode=${modeToString(mode)}`) } - if (mtime != null) { - const { - secs, nsecs - } = mtimeToObject(mtime) + const time = mtimeToObject(mtime) + if (time != null) { + const { secs, nsecs } = time qs.push(`mtime=${secs}`) @@ -59,6 +65,7 @@ async function multipartRequest (source = '', abortController, headers = {}, bou } } catch (err) { // workaround for https://github.com/node-fetch/node-fetch/issues/753 + // @ts-ignore - abort does not expect an argument abortController.abort(err) } finally { yield `\r\n--${boundary}--\r\n` diff --git a/packages/ipfs-http-client/src/object/patch/append-data.js b/packages/ipfs-http-client/src/object/patch/append-data.js index 36bd09964d..e3e772a96f 100644 --- a/packages/ipfs-http-client/src/object/patch/append-data.js +++ b/packages/ipfs-http-client/src/object/patch/append-data.js @@ -4,7 +4,7 @@ const CID = require('cids') const multipartRequest = require('../../lib/multipart-request') const configure = require('../../lib/configure') const toUrlSearchParams = require('../../lib/to-url-search-params') -const anySignal = require('any-signal') +const { anySignal } = require('any-signal') const AbortController = require('native-abort-controller') module.exports = configure(api => { diff --git a/packages/ipfs-http-client/src/object/patch/set-data.js b/packages/ipfs-http-client/src/object/patch/set-data.js index b3db064d4b..207d8035f8 100644 --- a/packages/ipfs-http-client/src/object/patch/set-data.js +++ b/packages/ipfs-http-client/src/object/patch/set-data.js @@ -4,7 +4,7 @@ const CID = require('cids') const multipartRequest = require('../../lib/multipart-request') const configure = require('../../lib/configure') const toUrlSearchParams = require('../../lib/to-url-search-params') -const anySignal = require('any-signal') +const { anySignal } = require('any-signal') const AbortController = require('native-abort-controller') module.exports = configure(api => { diff --git a/packages/ipfs-http-client/src/object/put.js b/packages/ipfs-http-client/src/object/put.js index 1eed43a3c1..65eff020c3 100644 --- a/packages/ipfs-http-client/src/object/put.js +++ b/packages/ipfs-http-client/src/object/put.js @@ -5,7 +5,7 @@ const { DAGNode } = require('ipld-dag-pb') const multipartRequest = require('../lib/multipart-request') const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') -const anySignal = require('any-signal') +const { anySignal } = require('any-signal') const AbortController = require('native-abort-controller') const unit8ArrayToString = require('uint8arrays/to-string') const uint8ArrayFromString = require('uint8arrays/from-string') diff --git a/packages/ipfs-http-client/src/pin/rm-all.js b/packages/ipfs-http-client/src/pin/rm-all.js index 294ada306f..e19edd68e0 100644 --- a/packages/ipfs-http-client/src/pin/rm-all.js +++
b/packages/ipfs-http-client/src/pin/rm-all.js @@ -11,7 +11,7 @@ module.exports = configure(api => { const searchParams = new URLSearchParams(options.searchParams) searchParams.append('arg', `${path}`) - if (recursive != null) searchParams.set('recursive', recursive) + if (recursive != null) searchParams.set('recursive', String(recursive)) const res = await api.post('pin/rm', { timeout: options.timeout, diff --git a/packages/ipfs-http-client/src/pubsub/publish.js b/packages/ipfs-http-client/src/pubsub/publish.js index d83cdb2526..1a48f76e8c 100644 --- a/packages/ipfs-http-client/src/pubsub/publish.js +++ b/packages/ipfs-http-client/src/pubsub/publish.js @@ -3,7 +3,7 @@ const configure = require('../lib/configure') const toUrlSearchParams = require('../lib/to-url-search-params') const multipartRequest = require('../lib/multipart-request') -const anySignal = require('any-signal') +const { anySignal } = require('any-signal') const AbortController = require('native-abort-controller') module.exports = configure(api => { diff --git a/packages/ipfs-http-client/src/resolve.js b/packages/ipfs-http-client/src/resolve.js index 0a4458fbf1..a8dbfa608e 100644 --- a/packages/ipfs-http-client/src/resolve.js +++ b/packages/ipfs-http-client/src/resolve.js @@ -5,7 +5,7 @@ const toUrlSearchParams = require('./lib/to-url-search-params') module.exports = configure(api => { /** - * @type {import('../../ipfs/src/core/components/resolve').Resolve} + * @type {import('.').Implements} */ async function resolve (path, options = {}) { const res = await api.post('resolve', { diff --git a/packages/ipfs-http-client/src/version.js b/packages/ipfs-http-client/src/version.js index 71755a7550..c2cc538e36 100644 --- a/packages/ipfs-http-client/src/version.js +++ b/packages/ipfs-http-client/src/version.js @@ -3,10 +3,11 @@ const toCamel = require('./lib/object-to-camel') const configure = require('./lib/configure') const toUrlSearchParams = require('./lib/to-url-search-params') +const pkg = require('../package.json') module.exports = configure(api => { /** - * @type {import('../../ipfs/src/core/components/version').Version} + * @type {import('.').Implements} */ async function version (options = {}) { const res = await api.post('version', { @@ -15,9 +16,12 @@ module.exports = configure(api => { searchParams: toUrlSearchParams(options), headers: options.headers }) - const data = await res.json() - return toCamel(data) + const data = toCamel(await res.json()) + data['ipfs-http-client'] = pkg.version + + return data } + return version }) diff --git a/packages/ipfs-http-client/tsconfig.json b/packages/ipfs-http-client/tsconfig.json new file mode 100644 index 0000000000..bbdcd5851e --- /dev/null +++ b/packages/ipfs-http-client/tsconfig.json @@ -0,0 +1,18 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + "package.json" + ], + "references": [ + { + "path": "../ipfs-core-utils" + }, + { + "path": "../ipfs-core" + } + ] +} diff --git a/packages/ipfs-http-gateway/package.json b/packages/ipfs-http-gateway/package.json index 2a48af3b38..b51e996344 100644 --- a/packages/ipfs-http-gateway/package.json +++ b/packages/ipfs-http-gateway/package.json @@ -20,7 +20,8 @@ }, "scripts": { "lint": "aegir lint", - "build": "aegir build", + "build": "npm run build:types", + "build:types": "tsc --build", "test": "npm run test:node", "test:node": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", @@ -46,8 +47,9 @@ "uri-to-multiaddr": "^4.0.0" }, 
"devDependencies": { - "aegir": "^27.0.0", + "aegir": "^28.0.0", "file-type": "^15.0.1", - "sinon": "^9.0.3" + "sinon": "^9.0.3", + "typescript": "^4.0.3" } } diff --git a/packages/ipfs-http-gateway/src/index.js b/packages/ipfs-http-gateway/src/index.js index db0613b52b..f1e5766336 100644 --- a/packages/ipfs-http-gateway/src/index.js +++ b/packages/ipfs-http-gateway/src/index.js @@ -37,11 +37,12 @@ async function serverCreator (serverAddrs, createServer, ipfs, cors) { } class HttpGateway { - constructor (ipfs, options) { + constructor (ipfs, options = {}) { this._ipfs = ipfs - this._options = options || {} - this._log = debug(LOG) - this._log.error = debug(LOG_ERROR) + this._options = {} + this._log = Object.assign(debug(LOG), { + error: debug(LOG_ERROR) + }) } async start () { diff --git a/packages/ipfs-http-gateway/src/resources/gateway.js b/packages/ipfs-http-gateway/src/resources/gateway.js index eac67f6e60..def38dd744 100644 --- a/packages/ipfs-http-gateway/src/resources/gateway.js +++ b/packages/ipfs-http-gateway/src/resources/gateway.js @@ -14,8 +14,9 @@ const toStream = require('it-to-stream') const PathUtils = require('../utils/path') const { cidToString } = require('ipfs-core-utils/src/cid') -const log = debug('ipfs:http-gateway') -log.error = debug('ipfs:http-gateway:error') +const log = Object.assign(debug('ipfs:http-gateway'), { + error: debug('ipfs:http-gateway:error') +}) module.exports = { diff --git a/packages/ipfs-http-gateway/tsconfig.json b/packages/ipfs-http-gateway/tsconfig.json new file mode 100644 index 0000000000..bbdcd5851e --- /dev/null +++ b/packages/ipfs-http-gateway/tsconfig.json @@ -0,0 +1,18 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + "package.json" + ], + "references": [ + { + "path": "../ipfs-core-utils" + }, + { + "path": "../ipfs-core" + } + ] +} diff --git a/packages/ipfs-http-server/package.json b/packages/ipfs-http-server/package.json index 11129353e4..9fdd0a6897 100644 --- a/packages/ipfs-http-server/package.json +++ b/packages/ipfs-http-server/package.json @@ -20,7 +20,8 @@ }, "scripts": { "lint": "aegir lint", - "build": "aegir build", + "build": "npm run build:types", + "build:types": "tsc --build", "test": "npm run test:node", "test:node": "aegir test -t node", "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", @@ -70,13 +71,14 @@ "uri-to-multiaddr": "^4.0.0" }, "devDependencies": { - "aegir": "^27.0.0", + "aegir": "^28.0.0", "form-data": "^3.0.0", "iso-random-stream": "^1.1.1", "it-to-buffer": "^1.0.2", "qs": "^6.9.4", "sinon": "^9.0.3", - "stream-to-promise": "^3.0.0" + "stream-to-promise": "^3.0.0", + "typescript": "^4.0.3" }, "optionalDependencies": { "prom-client": "^12.0.0" diff --git a/packages/ipfs-http-server/src/api/resources/block.js b/packages/ipfs-http-server/src/api/resources/block.js index 8607fc9ce6..5d8d9c65fa 100644 --- a/packages/ipfs-http-server/src/api/resources/block.js +++ b/packages/ipfs-http-server/src/api/resources/block.js @@ -6,14 +6,11 @@ const multipart = require('../../utils/multipart-request-parser') const Joi = require('../../utils/joi') const Boom = require('@hapi/boom') const { cidToString } = require('ipfs-core-utils/src/cid') -const debug = require('debug') const all = require('it-all') -const pipe = require('it-pipe') +const { pipe } = require('it-pipe') const { map } = require('streaming-iterables') const ndjson = require('iterable-ndjson') const streamResponse = require('../../utils/stream-response') -const log = 
debug('ipfs:http-api:block') -log.error = debug('ipfs:http-api:block:error') exports.get = { options: { @@ -79,7 +76,7 @@ exports.put = { }, pre: [{ assign: 'args', - method: async (request, h) => { + method: async (request, _h) => { if (!request.payload) { throw Boom.badRequest("File argument 'data' is required") } diff --git a/packages/ipfs-http-server/src/api/resources/config.js b/packages/ipfs-http-server/src/api/resources/config.js index 06ce17004f..67504f1fd9 100644 --- a/packages/ipfs-http-server/src/api/resources/config.js +++ b/packages/ipfs-http-server/src/api/resources/config.js @@ -3,8 +3,9 @@ const debug = require('debug') const get = require('dlv') const set = require('just-safe-set') -const log = debug('ipfs:http-api:config') -log.error = debug('ipfs:http-api:config:error') +const log = Object.assign(debug('ipfs:http-api:config'), { + error: debug('ipfs:http-api:config:error') +}) const multipart = require('../../utils/multipart-request-parser') const Boom = require('@hapi/boom') const Joi = require('../../utils/joi') @@ -18,7 +19,7 @@ exports.getOrSet = { }, pre: [{ assign: 'args', - method: (request, h) => { + method: (request, _h) => { const parseValue = (args) => { if (request.query.bool) { args.value = args.value === 'true' @@ -233,7 +234,7 @@ exports.replace = { }, pre: [{ assign: 'args', - method: async (request, h) => { + method: async (request, _h) => { if (!request.payload) { throw Boom.badRequest("Argument 'file' is required") } diff --git a/packages/ipfs-http-server/src/api/resources/dag.js b/packages/ipfs-http-server/src/api/resources/dag.js index f0d49e4cf5..7f3c4a7bf3 100644 --- a/packages/ipfs-http-server/src/api/resources/dag.js +++ b/packages/ipfs-http-server/src/api/resources/dag.js @@ -5,13 +5,10 @@ const mh = require('multihashing-async').multihash const Joi = require('../../utils/joi') const multicodec = require('multicodec') const Boom = require('@hapi/boom') -const debug = require('debug') const { cidToString } = require('ipfs-core-utils/src/cid') const all = require('it-all') -const log = debug('ipfs:http-api:dag') -log.error = debug('ipfs:http-api:dag:error') const uint8ArrayToString = require('uint8arrays/to-string') const IpldFormats = { @@ -159,7 +156,7 @@ exports.put = { }, pre: [{ assign: 'args', - method: async (request, h) => { + method: async (request, _h) => { if (!request.payload) { throw Boom.badRequest("File argument 'object data' is required") } diff --git a/packages/ipfs-http-server/src/api/resources/dht.js b/packages/ipfs-http-server/src/api/resources/dht.js index 04e0536a42..1cfa2f42d2 100644 --- a/packages/ipfs-http-server/src/api/resources/dht.js +++ b/packages/ipfs-http-server/src/api/resources/dht.js @@ -2,15 +2,16 @@ const Joi = require('../../utils/joi') const Boom = require('@hapi/boom') -const pipe = require('it-pipe') +const { pipe } = require('it-pipe') const ndjson = require('iterable-ndjson') const toStream = require('it-to-stream') const { map } = require('streaming-iterables') const { PassThrough } = require('stream') const toIterable = require('stream-to-it') const debug = require('debug') -const log = debug('ipfs:http-api:dht') -log.error = debug('ipfs:http-api:dht:error') +const log = Object.assign(debug('ipfs:http-api:dht'), { + error: debug('ipfs:http-api:dht:error') +}) exports.findPeer = { options: { diff --git a/packages/ipfs-http-server/src/api/resources/files-regular.js b/packages/ipfs-http-server/src/api/resources/files-regular.js index 122ffcc04a..af1bf6f400 100644 --- 
a/packages/ipfs-http-server/src/api/resources/files-regular.js +++ b/packages/ipfs-http-server/src/api/resources/files-regular.js @@ -3,14 +3,15 @@ const multipart = require('../../utils/multipart-request-parser') const debug = require('debug') const tar = require('it-tar') -const log = debug('ipfs:http-api:files') -log.error = debug('ipfs:http-api:files:error') +const log = Object.assign(debug('ipfs:http-api:files'), { + error: debug('ipfs:http-api:files:error') +}) const toIterable = require('stream-to-it') const Joi = require('../../utils/joi') const Boom = require('@hapi/boom') const { PassThrough } = require('stream') const { cidToString } = require('ipfs-core-utils/src/cid') -const pipe = require('it-pipe') +const { pipe } = require('it-pipe') const all = require('it-all') const ndjson = require('iterable-ndjson') const { map } = require('streaming-iterables') diff --git a/packages/ipfs-http-server/src/api/resources/files/ls.js b/packages/ipfs-http-server/src/api/resources/files/ls.js index 1180b424dd..a8aa7d9fc8 100644 --- a/packages/ipfs-http-server/src/api/resources/files/ls.js +++ b/packages/ipfs-http-server/src/api/resources/files/ls.js @@ -3,7 +3,7 @@ const Joi = require('../../../utils/joi') const all = require('it-all') const map = require('it-map') -const pipe = require('it-pipe') +const { pipe } = require('it-pipe') const streamResponse = require('../../../utils/stream-response') const mapEntry = (entry, options = {}) => { diff --git a/packages/ipfs-http-server/src/api/resources/name.js b/packages/ipfs-http-server/src/api/resources/name.js index a7663728af..539b84db49 100644 --- a/packages/ipfs-http-server/src/api/resources/name.js +++ b/packages/ipfs-http-server/src/api/resources/name.js @@ -1,7 +1,7 @@ 'use strict' const Joi = require('../../utils/joi') -const pipe = require('it-pipe') +const { pipe } = require('it-pipe') const { map } = require('streaming-iterables') const last = require('it-last') const ndjson = require('iterable-ndjson') diff --git a/packages/ipfs-http-server/src/api/resources/object.js b/packages/ipfs-http-server/src/api/resources/object.js index fbd30f884a..69290fb939 100644 --- a/packages/ipfs-http-server/src/api/resources/object.js +++ b/packages/ipfs-http-server/src/api/resources/object.js @@ -10,10 +10,11 @@ const Boom = require('@hapi/boom') const uint8ArrayToString = require('uint8arrays/to-string') const { cidToString } = require('ipfs-core-utils/src/cid') const debug = require('debug') -const log = debug('ipfs:http-api:object') -log.error = debug('ipfs:http-api:object:error') +const log = Object.assign(debug('ipfs:http-api:object'), { + error: debug('ipfs:http-api:object:error') +}) -const readFilePart = async (request, h) => { +const readFilePart = async (request, _h) => { if (!request.payload) { throw Boom.badRequest("File argument 'data' is required") } @@ -530,13 +531,13 @@ exports.patchAppendData = { const answer = { Data: nodeJSON.data, - Hash: cidToString(newCid, { cidBase, upgrade: false }), + Hash: cidToString(newCid, { base: cidBase, upgrade: false }), Size: nodeJSON.size, Links: nodeJSON.links.map((l) => { return { Name: l.name, Size: l.size, - Hash: cidToString(l.cid, { cidBase, upgrade: false }) + Hash: cidToString(l.cid, { base: cidBase, upgrade: false }) } }) } @@ -616,12 +617,12 @@ exports.patchSetData = { const nodeJSON = node.toJSON() return h.response({ - Hash: cidToString(newCid, { cidBase, upgrade: false }), + Hash: cidToString(newCid, { base: cidBase, upgrade: false }), Links: nodeJSON.links.map((l) => { return { Name: 
l.name, Size: l.size, - Hash: cidToString(l.cid, { cidBase, upgrade: false }) + Hash: cidToString(l.cid, { base: cidBase, upgrade: false }) } }) }) @@ -701,13 +702,13 @@ exports.patchAddLink = { const answer = { Data: nodeJSON.data, - Hash: cidToString(cid, { cidBase, upgrade: false }), + Hash: cidToString(cid, { base: cidBase, upgrade: false }), Size: nodeJSON.size, Links: nodeJSON.links.map((l) => { return { Name: l.name, Size: l.size, - Hash: cidToString(l.cid, { cidBase, upgrade: false }) + Hash: cidToString(l.cid, { base: cidBase, upgrade: false }) } }) } @@ -783,13 +784,13 @@ exports.patchRmLink = { const answer = { Data: nodeJSON.data, - Hash: cidToString(cid, { cidBase, upgrade: false }), + Hash: cidToString(cid, { base: cidBase, upgrade: false }), Size: nodeJSON.size, Links: nodeJSON.links.map((l) => { return { Name: l.name, Size: l.size, - Hash: cidToString(l.cid, { cidBase, upgrade: false }) + Hash: cidToString(l.cid, { base: cidBase, upgrade: false }) } }) } diff --git a/packages/ipfs-http-server/src/api/resources/pin.js b/packages/ipfs-http-server/src/api/resources/pin.js index 17997914d5..207023c217 100644 --- a/packages/ipfs-http-server/src/api/resources/pin.js +++ b/packages/ipfs-http-server/src/api/resources/pin.js @@ -3,7 +3,7 @@ const Joi = require('../../utils/joi') const Boom = require('@hapi/boom') const { map, reduce } = require('streaming-iterables') -const pipe = require('it-pipe') +const { pipe } = require('it-pipe') const ndjson = require('iterable-ndjson') const { cidToString } = require('ipfs-core-utils/src/cid') const streamResponse = require('../../utils/stream-response') diff --git a/packages/ipfs-http-server/src/api/resources/ping.js b/packages/ipfs-http-server/src/api/resources/ping.js index b82bf037b9..0d8af0452a 100644 --- a/packages/ipfs-http-server/src/api/resources/ping.js +++ b/packages/ipfs-http-server/src/api/resources/ping.js @@ -1,7 +1,7 @@ 'use strict' const Joi = require('../../utils/joi') -const pipe = require('it-pipe') +const { pipe } = require('it-pipe') const { map } = require('streaming-iterables') const ndjson = require('iterable-ndjson') const streamResponse = require('../../utils/stream-response') diff --git a/packages/ipfs-http-server/src/api/resources/pubsub.js b/packages/ipfs-http-server/src/api/resources/pubsub.js index 2d2d8685c9..b974535b09 100644 --- a/packages/ipfs-http-server/src/api/resources/pubsub.js +++ b/packages/ipfs-http-server/src/api/resources/pubsub.js @@ -85,7 +85,7 @@ exports.publish = { }, pre: [{ assign: 'data', - method: async (request, h) => { + method: async (request, _h) => { if (!request.payload) { throw Boom.badRequest('argument "data" is required') } diff --git a/packages/ipfs-http-server/src/api/resources/repo.js b/packages/ipfs-http-server/src/api/resources/repo.js index 4c52613fb2..32b210ae78 100644 --- a/packages/ipfs-http-server/src/api/resources/repo.js +++ b/packages/ipfs-http-server/src/api/resources/repo.js @@ -2,7 +2,7 @@ const Joi = require('../../utils/joi') const { map, filter } = require('streaming-iterables') -const pipe = require('it-pipe') +const { pipe } = require('it-pipe') const ndjson = require('iterable-ndjson') const streamResponse = require('../../utils/stream-response') diff --git a/packages/ipfs-http-server/src/api/resources/shutdown.js b/packages/ipfs-http-server/src/api/resources/shutdown.js index 50fcaf2fdf..428c902226 100644 --- a/packages/ipfs-http-server/src/api/resources/shutdown.js +++ b/packages/ipfs-http-server/src/api/resources/shutdown.js @@ -7,7 +7,8 @@ * on the next 
'tick' emits SIGTERM. */ module.exports = { - handler: (request, h) => { + handler: (_request, h) => { + // @ts-ignore - TS expects second argument setImmediate(() => process.emit('SIGTERM')) return h.response() } diff --git a/packages/ipfs-http-server/src/api/resources/stats.js b/packages/ipfs-http-server/src/api/resources/stats.js index cecac390ec..6cb999ba20 100644 --- a/packages/ipfs-http-server/src/api/resources/stats.js +++ b/packages/ipfs-http-server/src/api/resources/stats.js @@ -1,7 +1,7 @@ 'use strict' const { map } = require('streaming-iterables') -const pipe = require('it-pipe') +const { pipe } = require('it-pipe') const ndjson = require('iterable-ndjson') const streamResponse = require('../../utils/stream-response') const Joi = require('../../utils/joi') diff --git a/packages/ipfs-http-server/src/api/routes/index.js b/packages/ipfs-http-server/src/api/routes/index.js index 2b514e1e5b..72258ea5d4 100644 --- a/packages/ipfs-http-server/src/api/routes/index.js +++ b/packages/ipfs-http-server/src/api/routes/index.js @@ -29,4 +29,5 @@ const routes = [ // webui is loaded from API port, but works over GET (not a part of RPC API) const extraRoutes = [...require('./webui')] +// @ts-ignore - two routes array seem to inferred as diff types module.exports = routes.concat(extraRoutes) diff --git a/packages/ipfs-http-server/src/api/routes/webui.js b/packages/ipfs-http-server/src/api/routes/webui.js index 5d25ceed6c..53767f2d1a 100644 --- a/packages/ipfs-http-server/src/api/routes/webui.js +++ b/packages/ipfs-http-server/src/api/routes/webui.js @@ -1,9 +1,6 @@ 'use strict' -const debug = require('debug') const { gateway } = require('ipfs-http-gateway/src/resources') -const log = debug('ipfs:webui:info') -log.error = debug('ipfs:webui:error') const webuiCid = 'bafybeif4zkmu7qdhkpf3pnhwxipylqleof7rl6ojbe7mq3fzogz6m4xk3i' // v2.11.4 @@ -24,7 +21,7 @@ module.exports = [ { method: 'GET', path: '/webui/{slug?}', // optional slug makes it work with and without slash - handler (request, h) { + handler (_request, h) { return h.redirect(`/ipfs/${webuiCid}/`) } } diff --git a/packages/ipfs-http-server/src/index.js b/packages/ipfs-http-server/src/index.js index 586d1e4149..4b6fb5cf06 100644 --- a/packages/ipfs-http-server/src/index.js +++ b/packages/ipfs-http-server/src/index.js @@ -7,7 +7,6 @@ const multiaddr = require('multiaddr') const toMultiaddr = require('uri-to-multiaddr') const Boom = require('@hapi/boom') const AbortController = require('native-abort-controller') - const errorHandler = require('./error-handler') const LOG = 'ipfs:http-api' const LOG_ERROR = 'ipfs:http-api:error' @@ -42,11 +41,11 @@ async function serverCreator (serverAddrs, createServer, ipfs, cors) { } class HttpApi { - constructor (ipfs, options) { + constructor (ipfs, options = {}) { this._ipfs = ipfs - this._options = options || {} - this._log = debug(LOG) - this._log.error = debug(LOG_ERROR) + this._log = Object.assign(debug(LOG), { + error: debug(LOG_ERROR) + }) } async start () { diff --git a/packages/ipfs-http-server/src/utils/joi.js b/packages/ipfs-http-server/src/utils/joi.js index 9353364dd4..80fb0347e0 100644 --- a/packages/ipfs-http-server/src/utils/joi.js +++ b/packages/ipfs-http-server/src/utils/joi.js @@ -45,12 +45,14 @@ const reqiureIfRequired = (value, helpers) => { module.exports = Joi .extend( + // @ts-ignore - according to typedfs coerce should always return + // { errors?: ErrorReport[], value?: any } (joi) => { return { type: 'cid', base: joi.any(), validate: reqiureIfRequired, - coerce (value, helpers) { 
+ coerce (value, _helpers) { if (!value) { return } @@ -64,7 +66,7 @@ module.exports = Joi type: 'ipfsPath', base: joi.string(), validate: reqiureIfRequired, - coerce (value, helpers) { + coerce (value, _helpers) { if (!value) { return } @@ -78,7 +80,7 @@ module.exports = Joi type: 'peerId', base: joi.string(), validate: reqiureIfRequired, - coerce (value, helpers) { + coerce (value, _helpers) { if (!value) { return } @@ -92,7 +94,7 @@ module.exports = Joi type: 'multiaddr', base: joi.string(), validate: reqiureIfRequired, - coerce (value, helpers) { + coerce (value, _helpers) { if (!value) { return } @@ -106,7 +108,7 @@ module.exports = Joi type: 'timeout', base: joi.number(), validate: reqiureIfRequired, - coerce (value, helpers) { + coerce (value, _helpers) { if (!value) { return } @@ -120,7 +122,7 @@ module.exports = Joi type: 'cidAndPath', base: joi.any(), validate: reqiureIfRequired, - coerce (value, helpers) { + coerce (value, _helpers) { if (!value) { return } @@ -134,7 +136,7 @@ module.exports = Joi type: 'cidBase', base: joi.string(), validate: reqiureIfRequired, - coerce (value, helpers) { + coerce (value, _helpers) { if (!value) { return } @@ -152,7 +154,7 @@ module.exports = Joi type: 'json', base: joi.any(), validate: reqiureIfRequired, - coerce (value, helpers) { + coerce (value, _helpers) { if (!value) { return } diff --git a/packages/ipfs-http-server/src/utils/multipart-request-parser.js b/packages/ipfs-http-server/src/utils/multipart-request-parser.js index 00c52715a7..baa412a683 100644 --- a/packages/ipfs-http-server/src/utils/multipart-request-parser.js +++ b/packages/ipfs-http-server/src/utils/multipart-request-parser.js @@ -78,16 +78,16 @@ async function * parseEntry (stream, options) { const query = qs.parse(disposition.name.split('?').pop()) if (query.mode) { - entry.mode = parseInt(query.mode, 8) + entry.mode = parseInt(readQueryParam(query.mode), 8) } if (query.mtime) { entry.mtime = { - secs: parseInt(query.mtime, 10) + secs: parseInt(readQueryParam(query.mtime), 10) } if (query['mtime-nsecs']) { - entry.mtime.nsecs = parseInt(query['mtime-nsecs'], 10) + entry.mtime.nsecs = parseInt(readQueryParam(query['mtime-nsecs']), 10) } } @@ -98,6 +98,12 @@ async function * parseEntry (stream, options) { } } +/** + * @param {string|string[]} value + * @returns {string} + */ +const readQueryParam = value => Array.isArray(value) ? 
value[0] : value + async function * parser (stream, options) { for await (const entry of parseEntry(multipart(stream, options.boundary), options)) { if (entry.type === 'directory') { diff --git a/packages/ipfs-http-server/src/utils/stream-response.js b/packages/ipfs-http-server/src/utils/stream-response.js index 87dab32ea2..9a76168958 100644 --- a/packages/ipfs-http-server/src/utils/stream-response.js +++ b/packages/ipfs-http-server/src/utils/stream-response.js @@ -1,7 +1,7 @@ 'use strict' const { PassThrough } = require('stream') -const pipe = require('it-pipe') +const { pipe } = require('it-pipe') const log = require('debug')('ipfs:http-api:utils:stream-response') const toIterable = require('stream-to-it') diff --git a/packages/ipfs-http-server/tsconfig.json b/packages/ipfs-http-server/tsconfig.json new file mode 100644 index 0000000000..2e66dcb8a7 --- /dev/null +++ b/packages/ipfs-http-server/tsconfig.json @@ -0,0 +1,21 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "src", + "package.json" + ], + "references": [ + { + "path": "../ipfs-core-utils" + }, + { + "path": "../ipfs-core" + }, + { + "path": "../ipfs-http-gateway" + } + ] +} diff --git a/packages/ipfs-message-port-client/.aegir.js b/packages/ipfs-message-port-client/.aegir.js index b24f1478c8..688a1713bf 100644 --- a/packages/ipfs-message-port-client/.aegir.js +++ b/packages/ipfs-message-port-client/.aegir.js @@ -4,7 +4,7 @@ const EchoServer = require('aegir/utils/echo-server') const echoServer = new EchoServer() module.exports = { - bundlesize: { maxSize: '80kB' }, + bundlesize: { maxSize: '12kB' }, karma: { files: [ { diff --git a/packages/ipfs-message-port-client/package.json b/packages/ipfs-message-port-client/package.json index fd6ae14cb1..baab3962c1 100644 --- a/packages/ipfs-message-port-client/package.json +++ b/packages/ipfs-message-port-client/package.json @@ -16,6 +16,13 @@ "dist" ], "main": "src/index.js", + "typesVersions": { + "*": { + "*": [ + "dist/*" + ] + } + }, "browser": {}, "repository": { "type": "git", @@ -26,7 +33,9 @@ "test:interface:message-port-client": "npm run build:test-worker && aegir test -t browser -f ./test/interface-message-port-client.js", "lint": "aegir lint", "build:test-worker": "aegir build -- --config ./test/util/webpack.config.js", - "build": "aegir build", + "build": "npm run build:bundle && npm run build:types", + "build:bundle": "aegir build", + "build:types": "tsc --build", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rm -rf ./dist", "dep-check": "aegir dep-check" @@ -36,13 +45,14 @@ "cids": "^1.0.0" }, "devDependencies": { - "aegir": "^27.0.0", + "aegir": "^28.0.0", "cross-env": "^7.0.0", "interface-ipfs-core": "^0.140.0", "ipfs": "^0.50.2", "ipfs-message-port-protocol": "^0.2.0", "ipfs-message-port-server": "^0.2.2", - "ipld-dag-pb": "^0.20.0" + "ipld-dag-pb": "^0.20.0", + "typescript": "^4.0.3" }, "engines": { "node": ">=10.3.0", diff --git a/packages/ipfs-message-port-client/src/block.js b/packages/ipfs-message-port-client/src/block.js index 581b06afce..fae2c7f4a5 100644 --- a/packages/ipfs-message-port-client/src/block.js +++ b/packages/ipfs-message-port-client/src/block.js @@ -1,6 +1,6 @@ 'use strict' -const { Client } = require('./client') +const Client = require('./client') const { encodeCID, decodeCID } = require('ipfs-message-port-protocol/src/cid') const { decodeError } = require('ipfs-message-port-protocol/src/error') const { @@ -14,7 +14,7 @@ const { * @typedef 
{import('ipfs-message-port-server/src/block').EncodedBlock} EncodedBlock * @typedef {import('ipfs-message-port-server/src/block').Rm} EncodedRmEntry * @typedef {import('ipfs-message-port-server/src/block').BlockService} BlockService - * @typedef {import('./client').ClientTransport} Transport + * @typedef {import('./client').MessageTransport} MessageTransport */ /** @@ -23,7 +23,7 @@ const { */ class BlockClient extends Client { /** - * @param {Transport} transport + * @param {MessageTransport} transport */ constructor (transport) { super('block', ['put', 'get', 'rm', 'stat'], transport) diff --git a/packages/ipfs-message-port-client/src/client.js b/packages/ipfs-message-port-client/src/client.js index 45acc20ad0..0cf43f2e5a 100644 --- a/packages/ipfs-message-port-client/src/client.js +++ b/packages/ipfs-message-port-client/src/client.js @@ -1,7 +1,8 @@ 'use strict' /* eslint-env browser */ -const { decodeError } = require('ipfs-message-port-protocol/src/error') + +const Service = require('./client/service') /** * @template T @@ -13,274 +14,6 @@ const { decodeError } = require('ipfs-message-port-protocol/src/error') * @typedef {import('ipfs-message-port-protocol/src/rpc').ProcedureNames} ProcedureNames */ -/** - * @typedef {Object} QueryOptions - * @property {AbortSignal} [signal] - * @property {number} [timeout] - * @property {Transferable[]} [transfer] - */ - -/** - * @template I - * @typedef {I & QueryOptions} QueryInput - */ - -/** - * Represents server query, encapsulating inputs to the server endpoint and - * promise of it's result. - * - * @template I,O - * @class - */ -class Query { - /** - * @param {string} namespace - component namespace on the server. - * @param {string} method - remote method this is a query of. - * @param {QueryInput} input - query input. - */ - constructor (namespace, method, input) { - /** @type {Promise} */ - this.result = new Promise((resolve, reject) => { - this.succeed = resolve - this.fail = reject - this.signal = input.signal - this.input = input - this.namespace = namespace - this.method = method - this.timeout = input.timeout == null ? Infinity : input.timeout - /** @type {number|null} */ - this.timerID = null - }) - } - - /** - * Data that will be structure cloned over message channel. - * - * @returns {Object} - */ - toJSON () { - return this.input - } - - /** - * Data that will be transferred over message channel. - * - * @returns {Transferable[]} - */ - transfer () { - return this.input.transfer - } -} - -/** @typedef {Transport} ClientTransport */ - -/** - * RPC Transport over `MessagePort` that can execute queries. It takes care of - * executing queries by issuing a message with unique ID and fullfilling a - * query when corresponding response message is received. It also makes sure - * that aborted / timed out queries are cancelled as needed. - * - * It is expected that there will be at most one transport for a message port - * instance. - * - * @class - */ -class Transport { - /** - * Create transport for the underlying message port. - * - * @param {MessagePort} [port] - */ - constructor (port) { - this.port = null - // Assigining a random enough identifier to the transport, to ensure that - // query.id will be unique when multiple tabs are communicating with a - // a server in the SharedWorker. - this.id = Math.random() - .toString(32) - .slice(2) - - // Local unique id on the transport which is incremented for each query. 
- this.nextID = 0 - - // Dictionary of pending requests - /** @type {Record>} */ - this.queries = Object.create(null) - - // If port is provided connect this transport to it. If not transport can - // queue queries and execute those once it's connected. - if (port) { - this.connect(port) - } - } - - /** - * Executes given query with this transport and returns promise for it's - * result. Promise fails with an error if query fails. - * - * @template I, O - * @param {Query} query - * @returns {Promise} - */ - execute (query) { - const id = `${this.id}@${this.nextID++}` - this.queries[id] = query - - // If query has a timeout set a timer. - if (query.timeout > 0 && query.timeout < Infinity) { - query.timerID = setTimeout(Transport.timeout, query.timeout, this, id) - } - - if (query.signal) { - query.signal.addEventListener('abort', () => this.abort(id), { - once: true - }) - } - - // If transport is connected (it has port) post a query, otherwise it - // will remain in the pending queries queue. - if (this.port) { - Transport.postQuery(this.port, id, query) - } - - return query.result - } - - /** - * Connects this transport to the given message port. Throws `Error` if - * transport is already connected. All the pending queries will be executed - * as connection occurs. - * - * @param {MessagePort} port - */ - connect (port) { - if (this.port) { - throw new Error('Transport is already open') - } else { - this.port = port - this.port.addEventListener('message', this) - this.port.start() - - // Go ever pending queries (that were submitted before transport was - // connected) and post them. This loop is safe because messages will not - // arrive while this loop is running so no mutation can occur. - for (const [id, query] of Object.entries(this.queries)) { - Transport.postQuery(port, id, query) - } - } - } - - /** - * Disconnects this transport. This will cause all the pending queries - * to be aborted and undelying message port to be closed. - * - * Once disconnected transport can not be reconnected back. - */ - disconnect () { - const error = new DisconnectError() - for (const [id, query] of Object.entries(this.queries)) { - query.fail(error) - this.abort(id) - } - - // Note that reference to port is kept that ensures that attempt to - // reconnect will throw an error. - if (this.port) { - this.port.removeEventListener('message', this) - this.port.close() - } - } - - /** - * Invoked on query timeout. If query is still pending it will fail and - * abort message will be send to a the server. - * - * @param {Transport} self - * @param {string} id - */ - static timeout (self, id) { - const { queries } = self - const query = queries[id] - if (query) { - delete queries[id] - query.fail(new TimeoutError('request timed out')) - if (self.port) { - self.port.postMessage({ type: 'abort', id }) - } - } - } - - /** - * Aborts this query by failing with `AbortError` and sending an abort message - * to the server. If query is no longer pending this has no effect. - * - * @param {string} id - */ - abort (id) { - const { queries } = this - const query = queries[id] - if (query) { - delete queries[id] - - query.fail(new AbortError()) - if (this.port) { - this.port.postMessage({ type: 'abort', id }) - } - - if (query.timerID != null) { - clearTimeout(query.timerID) - } - } - } - - /** - * Sends a given `query` with a given `id` over the message channel. 
- * - * @param {MessagePort} port - * @param {string} id - * @param {Query} query - */ - static postQuery (port, id, query) { - port.postMessage( - { - type: 'query', - namespace: query.namespace, - method: query.method, - id, - input: query.toJSON() - }, - query.transfer() - ) - } - - /** - * Handler is invoked when message on the message port is received. - * - * @param {MessageEvent} event - */ - handleEvent (event) { - const { id, result } = event.data - const query = this.queries[id] - // If query with a the given ID is found it is completed with the result, - // otherwise it is cancelled. - // Note: query may not be found when it was aborted on the client and at the - // same time server posted response. - if (query) { - delete this.queries[id] - if (result.ok) { - query.succeed(result.value) - } else { - query.fail(decodeError(result.error)) - } - - if (query.timerID != null) { - clearTimeout(query.timerID) - } - } - } -} -exports.Transport = Transport - /** * @template T * @typedef {Array} Keys @@ -291,37 +24,6 @@ exports.Transport = Transport * @typedef {Remote & Service} RemoteService */ -/** - * Service represents an API to a remote service `T`. It will have all the - * methods with the same signatures as `T`. - * - * @class - * @template T - */ -class Service { - /** - * @param {string} namespace - Namespace that remote API is served under. - * @param {ProcedureNames} methods - Method names of the remote API. - * @param {Transport} transport - Transport to issue queries over. - */ - constructor (namespace, methods, transport) { - this.transport = transport - // Type script does not like using classes as some dicitionaries, so - // we explicitly type it as dictionary. - /** @type {Object., Function>} */ - const api = this - for (const method of methods) { - /** - * @template I, O - * @param {I} input - * @returns {Promise} - */ - api[method] = input => - this.transport.execute(new Query(namespace, method.toString(), input)) - } - } -} - /** * Client represents the client to remote `T` service. 
It is a base clase that * specific API clients will subclass to provide a higher level API for end @@ -331,36 +33,18 @@ class Service { * @class * @template T */ -class Client { +module.exports = class Client { /** * @param {string} namespace * @param {ProcedureNames} methods - * @param {Transport} transport + * @param {MessageTransport} transport */ constructor (namespace, methods, transport) { /** @type {RemoteService} */ this.remote = (new Service(namespace, methods, transport)) } } -exports.Client = Client - -class TimeoutError extends Error { - get name () { - return this.constructor.name - } -} -exports.TimeoutError = TimeoutError - -class AbortError extends Error { - get name () { - return this.constructor.name - } -} -exports.AbortError = AbortError -class DisconnectError extends Error { - get name () { - return this.constructor.name - } -} -exports.DisconnectError = DisconnectError +/** + * @typedef {import('./client/transport')} MessageTransport + */ diff --git a/packages/ipfs-message-port-client/src/client/error.js b/packages/ipfs-message-port-client/src/client/error.js new file mode 100644 index 0000000000..a42b21c928 --- /dev/null +++ b/packages/ipfs-message-port-client/src/client/error.js @@ -0,0 +1,19 @@ +'use strict' + +exports.TimeoutError = class TimeoutError extends Error { + get name () { + return this.constructor.name + } +} + +exports.AbortError = class AbortError extends Error { + get name () { + return this.constructor.name + } +} + +exports.DisconnectError = class DisconnectError extends Error { + get name () { + return this.constructor.name + } +} diff --git a/packages/ipfs-message-port-client/src/client/query.js b/packages/ipfs-message-port-client/src/client/query.js new file mode 100644 index 0000000000..3246eeac92 --- /dev/null +++ b/packages/ipfs-message-port-client/src/client/query.js @@ -0,0 +1,58 @@ +'use strict' + +/** + * Represents server query, encapsulating inputs to the server endpoint and + * promise of it's result. + * + * @template I,O + */ +module.exports = class Query { + /** + * @param {string} namespace - component namespace on the server. + * @param {string} method - remote method this is a query of. + * @param {QueryInput} input - query input. + */ + constructor (namespace, method, input) { + /** @type {Promise} */ + this.result = new Promise((resolve, reject) => { + this.succeed = resolve + this.fail = reject + this.signal = input.signal + this.input = input + this.namespace = namespace + this.method = method + this.timeout = input.timeout == null ? Infinity : input.timeout + /** @type {number|null} */ + this.timerID = null + }) + } + + /** + * Data that will be structure cloned over message channel. + * + * @returns {Object} + */ + toJSON () { + return this.input + } + + /** + * Data that will be transferred over message channel. + * + * @returns {Transferable[]|void} + */ + transfer () { + return this.input.transfer + } +} + +/** + * @typedef {Object} QueryOptions + * @property {AbortSignal} [signal] + * @property {number} [timeout] + * @property {Transferable[]} [transfer] + */ +/** + * @template I + * @typedef {I & QueryOptions} QueryInput + */ diff --git a/packages/ipfs-message-port-client/src/client/service.js b/packages/ipfs-message-port-client/src/client/service.js new file mode 100644 index 0000000000..3c8802adf9 --- /dev/null +++ b/packages/ipfs-message-port-client/src/client/service.js @@ -0,0 +1,41 @@ +'use strict' + +const Query = require('./query') + +/** + * Service represents an API to a remote service `T`. 
It will have all the + * methods with the same signatures as `T`. + * + * @template T + */ +module.exports = class Service { + /** + * @param {string} namespace - Namespace that remote API is served under. + * @param {ProcedureNames} methods - Method names of the remote API. + * @param {MessageTransport} transport - Transport to issue queries over. + */ + constructor (namespace, methods, transport) { + this.transport = transport + // Type script does not like using classes as some dicitionaries, so + // we explicitly type it as dictionary. + /** @type {Object., Function>} */ + const api = this + for (const method of methods) { + /** + * @template I, O + * @param {I} input + * @returns {Promise} + */ + api[method] = input => + this.transport.execute(new Query(namespace, method.toString(), input)) + } + } +} + +/** + * @typedef {import('./transport')} MessageTransport + */ +/** + * @template T + * @typedef {import('ipfs-message-port-protocol/src/rpc').ProcedureNames} ProcedureNames + */ diff --git a/packages/ipfs-message-port-client/src/client/transport.js b/packages/ipfs-message-port-client/src/client/transport.js new file mode 100644 index 0000000000..83b53aa66d --- /dev/null +++ b/packages/ipfs-message-port-client/src/client/transport.js @@ -0,0 +1,215 @@ +'use strict' + +const { decodeError } = require('ipfs-message-port-protocol/src/error') +const { DisconnectError, TimeoutError, AbortError } = require('./error') + +/** + * RPC Transport over `MessagePort` that can execute queries. It takes care of + * executing queries by issuing a message with unique ID and fullfilling a + * query when corresponding response message is received. It also makes sure + * that aborted / timed out queries are cancelled as needed. + * + * It is expected that there will be at most one transport for a message port + * instance. + * + */ +module.exports = class MessageTransport { + /** + * Create transport for the underlying message port. + * + * @param {MessagePort} [port] + */ + constructor (port) { + this.port = null + // Assigining a random enough identifier to the transport, to ensure that + // query.id will be unique when multiple tabs are communicating with a + // a server in the SharedWorker. + this.id = Math.random() + .toString(32) + .slice(2) + + // Local unique id on the transport which is incremented for each query. + this.nextID = 0 + + // Dictionary of pending requests + /** @type {Record>} */ + this.queries = Object.create(null) + + // If port is provided connect this transport to it. If not transport can + // queue queries and execute those once it's connected. + if (port) { + this.connect(port) + } + } + + /** + * Executes given query with this transport and returns promise for it's + * result. Promise fails with an error if query fails. + * + * @template I, O + * @param {Query} query + * @returns {Promise} + */ + execute (query) { + const id = `${this.id}@${this.nextID++}` + this.queries[id] = query + + // If query has a timeout set a timer. + if (query.timeout > 0 && query.timeout < Infinity) { + query.timerID = setTimeout(MessageTransport.timeout, query.timeout, this, id) + } + + if (query.signal) { + query.signal.addEventListener('abort', () => this.abort(id), { + once: true + }) + } + + // If transport is connected (it has port) post a query, otherwise it + // will remain in the pending queries queue. + if (this.port) { + MessageTransport.postQuery(this.port, id, query) + } + + return query.result + } + + /** + * Connects this transport to the given message port. 
Throws `Error` if + * transport is already connected. All the pending queries will be executed + * as connection occurs. + * + * @param {MessagePort} port + */ + connect (port) { + if (this.port) { + throw new Error('Transport is already open') + } else { + this.port = port + this.port.addEventListener('message', this) + this.port.start() + + // Go ever pending queries (that were submitted before transport was + // connected) and post them. This loop is safe because messages will not + // arrive while this loop is running so no mutation can occur. + for (const [id, query] of Object.entries(this.queries)) { + MessageTransport.postQuery(port, id, query) + } + } + } + + /** + * Disconnects this transport. This will cause all the pending queries + * to be aborted and undelying message port to be closed. + * + * Once disconnected transport can not be reconnected back. + */ + disconnect () { + const error = new DisconnectError() + for (const [id, query] of Object.entries(this.queries)) { + query.fail(error) + this.abort(id) + } + + // Note that reference to port is kept that ensures that attempt to + // reconnect will throw an error. + if (this.port) { + this.port.removeEventListener('message', this) + this.port.close() + } + } + + /** + * Invoked on query timeout. If query is still pending it will fail and + * abort message will be send to a the server. + * + * @param {MessageTransport} self + * @param {string} id + */ + static timeout (self, id) { + const { queries } = self + const query = queries[id] + if (query) { + delete queries[id] + query.fail(new TimeoutError('request timed out')) + if (self.port) { + self.port.postMessage({ type: 'abort', id }) + } + } + } + + /** + * Aborts this query by failing with `AbortError` and sending an abort message + * to the server. If query is no longer pending this has no effect. + * + * @param {string} id + */ + abort (id) { + const { queries } = this + const query = queries[id] + if (query) { + delete queries[id] + + query.fail(new AbortError()) + if (this.port) { + this.port.postMessage({ type: 'abort', id }) + } + + if (query.timerID != null) { + clearTimeout(query.timerID) + } + } + } + + /** + * Sends a given `query` with a given `id` over the message channel. + * + * @param {MessagePort} port + * @param {string} id + * @param {Query} query + */ + static postQuery (port, id, query) { + port.postMessage( + { + type: 'query', + namespace: query.namespace, + method: query.method, + id, + input: query.toJSON() + }, + // @ts-ignore - TS seems to want second arg to postMessage to not be undefined + query.transfer() + ) + } + + /** + * Handler is invoked when message on the message port is received. + * + * @param {MessageEvent} event + */ + handleEvent (event) { + const { id, result } = event.data + const query = this.queries[id] + // If query with a the given ID is found it is completed with the result, + // otherwise it is cancelled. + // Note: query may not be found when it was aborted on the client and at the + // same time server posted response. 
+ if (query) { + delete this.queries[id] + if (result.ok) { + query.succeed(result.value) + } else { + query.fail(decodeError(result.error)) + } + + if (query.timerID != null) { + clearTimeout(query.timerID) + } + } + } +} + +/** + * @template I,O + * @typedef {import('./query')} Query + */ diff --git a/packages/ipfs-message-port-client/src/core.js b/packages/ipfs-message-port-client/src/core.js index 388fde52d6..f2f096ca8c 100644 --- a/packages/ipfs-message-port-client/src/core.js +++ b/packages/ipfs-message-port-client/src/core.js @@ -2,7 +2,7 @@ /* eslint-env browser */ -const { Client } = require('./client') +const Client = require('./client') const { encodeCID, decodeCID, CID } = require('ipfs-message-port-protocol/src/cid') const { decodeIterable, @@ -51,7 +51,7 @@ const iterateReadableStream = require('browser-readablestream-to-it') * @typedef {import('ipfs-message-port-server/src/core').AddedEntry} AddedEntry * @typedef {import('ipfs-message-port-server/src/core').EncodedLsEntry} EncodedLsEntry * @typedef {import('ipfs-message-port-server/src/core').LsEntry} LsEntry - * @typedef {import('./client').ClientTransport} Transport + * @typedef {import('./client').MessageTransport} MessageTransport */ /** @@ -60,7 +60,7 @@ const iterateReadableStream = require('browser-readablestream-to-it') */ class CoreClient extends Client { /** - * @param {Transport} transport + * @param {MessageTransport} transport */ constructor (transport) { super('core', ['add', 'addAll', 'cat', 'ls'], transport) @@ -353,7 +353,7 @@ const encodeAsyncIterableContent = (content, transfer) => { } /** - * @param {number|Bytes|Blob|string|FileObject} content + * @param {number|Bytes|Blob|string|FileObject|void} content * @param {Transferable[]} transfer * @returns {FileInput|ArrayBuffer|ArrayBufferView} */ @@ -388,7 +388,7 @@ const encodeFileObject = ({ path, mode, mtime, content }, transfer) => { path, mode, mtime, - content: encodeFileContent(content, transfer) + content: content ? encodeFileContent(content, transfer) : undefined } } diff --git a/packages/ipfs-message-port-client/src/dag.js b/packages/ipfs-message-port-client/src/dag.js index 0bf62cfd4c..7a7434de89 100644 --- a/packages/ipfs-message-port-client/src/dag.js +++ b/packages/ipfs-message-port-client/src/dag.js @@ -1,6 +1,6 @@ 'use strict' -const { Client } = require('./client') +const Client = require('./client') const { encodeCID, decodeCID } = require('ipfs-message-port-protocol/src/cid') const { encodeNode, decodeNode } = require('ipfs-message-port-protocol/src/dag') @@ -11,7 +11,7 @@ const { encodeNode, decodeNode } = require('ipfs-message-port-protocol/src/dag') * @typedef {import('ipfs-message-port-server/src/dag').EncodedDAGNode} EncodedDAGNode * @typedef {import('ipfs-message-port-server/src/dag').DAGEntry} DAGEntry * @typedef {import('ipfs-message-port-server/src/dag').DAGService} DagService - * @typedef {import('./client').ClientTransport} Transport + * @typedef {import('./client').MessageTransport} MessageTransport */ /** @@ -20,7 +20,7 @@ const { encodeNode, decodeNode } = require('ipfs-message-port-protocol/src/dag') */ class DAGClient extends Client { /** - * @param {Transport} transport + * @param {MessageTransport} transport */ constructor (transport) { super('dag', ['put', 'get', 'resolve', 'tree'], transport) @@ -44,8 +44,8 @@ class DAGClient extends Client { const encodedCID = await this.remote.put({ ...options, - dagNode: encodeNode(dagNode, options.transfer), - cid: cid != null ? encodeCID(cid) : undefined + cid: cid != null ? 
encodeCID(cid) : undefined, + dagNode: encodeNode(dagNode, options.transfer) }) return decodeCID(encodedCID) diff --git a/packages/ipfs-message-port-client/src/files.js b/packages/ipfs-message-port-client/src/files.js index 73e84800b1..1aab19f11f 100644 --- a/packages/ipfs-message-port-client/src/files.js +++ b/packages/ipfs-message-port-client/src/files.js @@ -1,13 +1,13 @@ 'use strict' /* eslint-env browser */ -const { Client } = require('./client') +const Client = require('./client') const { decodeCID, CID } = require('ipfs-message-port-protocol/src/cid') /** * @typedef {import('ipfs-message-port-server/src/files').FilesService} FilesService * @typedef {import('ipfs-message-port-server/src/files').EncodedStat} EncodedStat - * @typedef {import('./client').ClientTransport} Transport + * @typedef {import('./client').MessageTransport} MessageTransport */ /** @@ -16,7 +16,7 @@ const { decodeCID, CID } = require('ipfs-message-port-protocol/src/cid') */ class FilesClient extends Client { /** - * @param {Transport} transport + * @param {MessageTransport} transport */ constructor (transport) { super('files', ['stat'], transport) diff --git a/packages/ipfs-message-port-client/src/index.js b/packages/ipfs-message-port-client/src/index.js index f049b43f84..fe005187e2 100644 --- a/packages/ipfs-message-port-client/src/index.js +++ b/packages/ipfs-message-port-client/src/index.js @@ -1,14 +1,14 @@ 'use strict' /* eslint-env browser */ -const { Transport } = require('./client') +const MessageTransport = require('./client/transport') const BlockClient = require('./block') const DAGClient = require('./dag') const CoreClient = require('./core') const FilesClient = require('./files') /** - * @typedef {import('./client').Transport} ClientTransport + * @typedef {import('./client').MessageTransport} MessageTransport * * @typedef {Object} ClientOptions * @property {MessagePort} port @@ -16,7 +16,7 @@ const FilesClient = require('./files') class IPFSClient extends CoreClient { /** - * @param {ClientTransport} transport + * @param {MessageTransport} transport */ constructor (transport) { super(transport) @@ -46,7 +46,7 @@ class IPFSClient extends CoreClient { * @returns {IPFSClient} */ static detached () { - return new IPFSClient(new Transport(undefined)) + return new IPFSClient(new MessageTransport(undefined)) } /** @@ -57,7 +57,7 @@ class IPFSClient extends CoreClient { * @returns {IPFSClient} */ static from (port) { - return new IPFSClient(new Transport(port)) + return new IPFSClient(new MessageTransport(port)) } } diff --git a/packages/ipfs-message-port-client/tsconfig.json b/packages/ipfs-message-port-client/tsconfig.json index 088cc55425..600c8308cd 100644 --- a/packages/ipfs-message-port-client/tsconfig.json +++ b/packages/ipfs-message-port-client/tsconfig.json @@ -1,28 +1,18 @@ { + "extends": "../../tsconfig.json", "compilerOptions": { - "allowJs": true, - "checkJs": true, - "forceConsistentCasingInFileNames": true, - "noImplicitReturns": true, - "noImplicitAny": true, - "noImplicitThis": true, - "noFallthroughCasesInSwitch": true, - "noUnusedLocals": true, - "noUnusedParameters": true, - "strictFunctionTypes": true, - "strictNullChecks": true, - "strictPropertyInitialization": true, - "strict": true, - "alwaysStrict": true, - "esModuleInterop": true, - "target": "ES5", - "noEmit": true + "outDir": "dist" }, - "exclude": ["dist", "node_modules"], "include": [ - "src/**/*.js", - "../ipfs-message-port-server/src/**/*.js", - "../ipfs-message-port-protocol/src/**/*.js" + "src", + "package.json" ], - 
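// --- Illustrative sketch (not part of the patch above) ----------------------
// How the DAG client above is expected to be used once a client is connected.
// `IPFSClient.from`, the `dag.put`/`dag.get` method names and the `pin`/`path`
// options come from this changeset; the assumption that the composed client
// exposes the sub-client as `ipfs.dag` (as in the core API) and the sample
// node are illustrative.
/* eslint-env browser */
'use strict'

const IPFSClient = require('ipfs-message-port-client')

/**
 * @param {MessagePort} port - port connected to an ipfs-message-port-server
 */
const run = async port => {
  const ipfs = IPFSClient.from(port)

  // put() encodes the node (and optional CID) before posting the query and
  // decodes the CID it gets back, as shown in DAGClient.put above.
  const cid = await ipfs.dag.put({ hello: 'world' }, { pin: false })

  // get() resolves a path within the stored node.
  const { value } = await ipfs.dag.get(cid, { path: '/hello' })
  console.log(value) // 'world'
}

module.exports = run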
"compileOnSave": false + "references": [ + { + "path": "../ipfs-message-port-protocol" + }, + { + "path": "../ipfs-message-port-server" + } + ] } diff --git a/packages/ipfs-message-port-protocol/.aegir.js b/packages/ipfs-message-port-protocol/.aegir.js new file mode 100644 index 0000000000..823e1d0e1a --- /dev/null +++ b/packages/ipfs-message-port-protocol/.aegir.js @@ -0,0 +1,5 @@ +'use strict' + +module.exports = { + bundlesize: { maxSize: '547B' } +} diff --git a/packages/ipfs-message-port-protocol/package.json b/packages/ipfs-message-port-protocol/package.json index 7d5f4f3f30..25d3fae790 100644 --- a/packages/ipfs-message-port-protocol/package.json +++ b/packages/ipfs-message-port-protocol/package.json @@ -13,6 +13,13 @@ "src", "dist" ], + "typesVersions": { + "*": { + "*": [ + "dist/*" + ] + } + }, "browser": {}, "repository": { "type": "git", @@ -26,7 +33,9 @@ "test:chrome": "aegir test -t browser -t webworker -- --browsers ChromeHeadless", "test:firefox": "aegir test -t browser -t webworker -- --browsers FirefoxHeadless", "lint": "aegir lint", - "build": "aegir build", + "build": "npm run build:bundle && npm run build:types", + "build:bundle": "aegir build", + "build:types": "tsc --build", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rm -rf ./dist", "dep-check": "aegir dep-check" @@ -36,9 +45,9 @@ "ipld-block": "^0.10.1" }, "devDependencies": { - "aegir": "^27.0.0", - "interface-ipfs-core": "^0.140.0", - "uint8arrays": "^1.1.0" + "aegir": "^28.0.0", + "uint8arrays": "^1.1.0", + "typescript": "^4.0.3" }, "engines": { "node": ">=10.3.0", diff --git a/packages/ipfs-message-port-protocol/tsconfig.json b/packages/ipfs-message-port-protocol/tsconfig.json index 4c65e6338b..979a39adab 100644 --- a/packages/ipfs-message-port-protocol/tsconfig.json +++ b/packages/ipfs-message-port-protocol/tsconfig.json @@ -1,21 +1,10 @@ { + "extends": "../../tsconfig.json", "compilerOptions": { - "allowJs": true, - "checkJs": true, - "noImplicitReturns": true, - "noImplicitAny": true, - "noImplicitThis": true, - "noEmitHelpers": true, - "strictFunctionTypes": true, - "strictNullChecks": true, - "strictPropertyInitialization": true, - "strict": true, - "esModuleInterop": true, - "alwaysStrict": true, - "target": "ES5", - "outDir": "./dist/" + "outDir": "dist" }, - "exclude": ["dist"], - "include": ["src", "../../node_modules/ipld-block/src/index.js"], - "compileOnSave": false + "include": [ + "src", + "package.json" + ] } diff --git a/packages/ipfs-message-port-server/package.json b/packages/ipfs-message-port-server/package.json index 7af230dd3b..c1a074f55c 100644 --- a/packages/ipfs-message-port-server/package.json +++ b/packages/ipfs-message-port-server/package.json @@ -17,6 +17,13 @@ ], "main": "src/index.js", "browser": {}, + "typesVersions": { + "*": { + "*": [ + "dist/*" + ] + } + }, "repository": { "type": "git", "url": "git+https://github.com/ipfs/js-ipfs.git" @@ -28,7 +35,9 @@ "test:chrome": "aegir test -t browser -t webworker -- --browsers ChromeHeadless", "test:firefox": "aegir test -t browser -t webworker -- --browsers FirefoxHeadless", "lint": "aegir lint", - "build": "aegir build", + "build": "npm run build:bundle && npm run build:types", + "build:bundle": "aegir build", + "build:types": "tsc --build", "coverage": "npx nyc -r html npm run test:node -- --bail", "clean": "rm -rf ./dist", "dep-check": "aegir dep-check" @@ -39,11 +48,10 @@ }, "devDependencies": { "@types/it-all": "^1.0.0", - "aegir": "^27.0.0", + "aegir": "^28.0.0", "cross-env": "^7.0.0", - 
"interface-ipfs-core": "^0.140.0", - "ipfs": "^0.50.2", - "ipfs-message-port-protocol": "^0.2.0" + "ipfs-message-port-protocol": "^0.2.0", + "typescript": "^4.0.3" }, "engines": { "node": ">=10.3.0", diff --git a/packages/ipfs-message-port-server/src/block.js b/packages/ipfs-message-port-server/src/block.js index 7657f50aa1..14071394ef 100644 --- a/packages/ipfs-message-port-server/src/block.js +++ b/packages/ipfs-message-port-server/src/block.js @@ -19,10 +19,7 @@ const { * @typedef {StatResult} Stat */ -/** - * @class - */ -class BlockService { +exports.BlockService = class BlockService { /** * @param {IPFS} ipfs */ @@ -35,10 +32,12 @@ class BlockService { * @property {EncodedBlock} block * @property {Transferable[]} transfer * - * @param {Object} query - * @param {EncodedCID} query.cid - * @param {number} [query.timeout] - * @param {AbortSignal} [query.signal] + * @typedef {Object} GetQuery + * @property {EncodedCID} cid + * @property {number} [timeout] + * @property {AbortSignal} [query.signal] + * + * @param {GetQuery} query * @returns {Promise} */ async get (query) { @@ -54,17 +53,20 @@ class BlockService { * @property {EncodedBlock} block * @property {Transferable[]} transfer * + * @typedef {Object} PutQuery + * @property {EncodedBlock|Uint8Array} block + * @property {EncodedCID|void} [cid] + * @property {string} [format] + * @property {string} [mhtype] + * @property {number} [mhlen] + * @property {number} [version] + * @property {boolean} [pin] + * @property {number} [timeout] + * @property {AbortSignal} [signal] + * * Stores input as an IPFS block. - * @param {Object} query - * @param {EncodedBlock|Uint8Array} query.block - * @param {EncodedCID|void} [query.cid] - * @param {string} [query.format] - * @param {string} [query.mhtype] - * @param {number} [query.mhlen] - * @param {number} [query.version] - * @param {boolean} [query.pin] - * @param {number} [query.timeout] - * @param {AbortSignal} [query.signal] + * + * @param {PutQuery} query * @returns {Promise} */ async put (query) { @@ -82,21 +84,22 @@ class BlockService { } /** - * Remove one or more IPFS block(s). - * - * @param {Object} query - * @param {EncodedCID[]} query.cids - * @param {boolean} [query.force] - * @param {boolean} [query.quiet] - * @param {number} [query.timeout] - * @param {AbortSignal} [query.signal] - * @returns {Promise} + * @typedef {Object} RmQuery + * @property {EncodedCID[]} cids + * @property {boolean} [force] + * @property {boolean} [quiet] + * @property {number} [timeout] + * @property {AbortSignal} [signal] * * @typedef {RmEntry[]} RmResult * * @typedef {Object} RmEntry * @property {EncodedCID} cid * @property {EncodedError|undefined} [error] + * + * Remove one or more IPFS block(s). + * @param {RmQuery} query + * @returns {Promise} */ async rm (query) { /** @type {Transferable[]} */ @@ -109,17 +112,19 @@ class BlockService { } /** - * Gets information of a raw IPFS block. - * - * @param {Object} query - * @param {EncodedCID} query.cid - * @param {number} [query.timeout] - * @param {AbortSignal} [query.signal] - * @returns {Promise} + * @typedef {Object} StatQuery + * @property {EncodedCID} cid + * @property {number} [timeout] + * @property {AbortSignal} [signal] * * @typedef {Object} StatResult * @property {EncodedCID} cid * @property {number} size + * + * Gets information of a raw IPFS block. 
+ * + * @param {StatQuery} query + * @returns {Promise} */ async stat (query) { const cid = decodeCID(query.cid) @@ -143,5 +148,3 @@ const encodeRmEntry = (entry, transfer) => { return { cid } } } - -exports.BlockService = BlockService diff --git a/packages/ipfs-message-port-server/src/core.js b/packages/ipfs-message-port-server/src/core.js index da7952b415..97092e2187 100644 --- a/packages/ipfs-message-port-server/src/core.js +++ b/packages/ipfs-message-port-server/src/core.js @@ -64,7 +64,7 @@ const { decodeCID, encodeCID } = require('ipfs-message-port-protocol/src/cid') * * @typedef {Object} FileInput * @property {string} [path] - * @property {FileContent} content + * @property {FileContent} [content] * @property {Mode} [mode] * @property {Time} [mtime] * @@ -77,24 +77,12 @@ const { decodeCID, encodeCID } = require('ipfs-message-port-protocol/src/cid') * @property {UnixFSTime} mtime * @property {number} size * - * @typedef {Object} CatQuery - * @property {string} path - * @property {number} [offset] - * @property {number} [length] - * - * @typedef {Object} GetQuery - * @property {string} path - * - * @typedef {RemoteIterable} GetResult - * * @typedef {Object} FileEntry * @property {string} path * @property {RemoteIterable} content * @property {Mode} [mode] * @property {UnixFSTime} [mtime] * - * @typedef {Object} LsQuery - * @property {string} path * * @typedef {Object} EncodedLsEntry * @property {EncodedCID} cid @@ -103,14 +91,11 @@ const { decodeCID, encodeCID } = require('ipfs-message-port-protocol/src/cid') * @property {string} path * @property {number} depth * @property {number} size - * @property {Mode} [mode] + * @property {Mode} mode * @property {UnixFSTime} [mtime] */ -/** - * @class - */ -class CoreService { +exports.CoreService = class CoreService { /** * * @param {IPFS} ipfs @@ -212,16 +197,18 @@ class CoreService { } /** + * @typedef {Object} CatQuery + * @property {string|EncodedCID} path + * @property {number} [offset] + * @property {number} [length] + * @property {number} [timeout] + * @property {AbortSignal} [signal] + * * @typedef {Object} CatResult * @property {RemoteIterable} data * @property {Transferable[]} transfer * - * @param {Object} query - * @param {string|EncodedCID} query.path - * @param {number} [query.offset] - * @param {number} [query.length] - * @param {number} [query.timeout] - * @param {AbortSignal} [query.signal] + * @param {CatQuery} query * @returns {CatResult} */ cat (query) { @@ -232,16 +219,18 @@ class CoreService { } /** + * @typedef {Object} LsQuery + * @property {string|EncodedCID} path + * @property {boolean} [preload] + * @property {boolean} [recursive] + * @property {number} [timeout] + * @property {AbortSignal} [signal] + * * @typedef {Object} LsResult * @property {RemoteIterable} data * @property {Transferable[]} transfer * - * @param {Object} query - * @param {string|EncodedCID} query.path - * @param {boolean} [query.preload] - * @param {boolean} [query.recursive] - * @param {number} [query.timeout] - * @param {AbortSignal} [query.signal] + * @param {LsQuery} query * @returns {LsResult} */ ls (query) { @@ -287,7 +276,7 @@ const decodeAddInput = input => const decodeFileInput = input => matchInput(input, file => ({ ...file, - content: decodeFileContent(file.content) + content: file.content && decodeFileContent(file.content) })) /** @@ -344,7 +333,7 @@ const encodeAddResult = out => { /** * - * @param {AsyncIterable} content + * @param {AsyncIterable} content * @returns {CatResult} */ const encodeCatResult = content => { @@ -383,9 +372,9 
@@ const encodeLsEntry = ({ depth, name, path, size, cid, type, mode, mtime }) => ( /** * Adds underlying `ArrayBuffer` to the transfer list. * - * @param {Buffer} buffer + * @param {Uint8Array} buffer * @param {Transferable[]} transfer - * @returns {Buffer} + * @returns {Uint8Array} */ const moveBuffer = (buffer, transfer) => { transfer.push(buffer.buffer) @@ -409,5 +398,3 @@ const encodeFileOutput = (file, _transfer) => ({ * @returns {T} */ const identity = v => v - -exports.CoreService = CoreService diff --git a/packages/ipfs-message-port-server/src/dag.js b/packages/ipfs-message-port-server/src/dag.js index 68900f8a0a..386af65b26 100644 --- a/packages/ipfs-message-port-server/src/dag.js +++ b/packages/ipfs-message-port-server/src/dag.js @@ -17,10 +17,7 @@ const collect = require('it-all') * @property {string} remainderPath */ -/** - * @class - */ -class DAGService { +exports.DAGService = class DAGService { /** * @param {IPFS} ipfs */ @@ -29,15 +26,16 @@ class DAGService { } /** - * @param {Object} query - * @param {EncodedDAGNode} query.dagNode - * @param {string} [query.format] - * @param {string} [query.hashAlg] - * @param {EncodedCID|void} [query.cid] - * @param {boolean} [query.pin] - * @param {boolean} [query.preload] - * @param {number} [query.timeout] - * @param {AbortSignal} [query.signal] + * @typedef {Object} PutDag + * @property {EncodedDAGNode} dagNode + * @property {string} [hashAlg] + * @property {EncodedCID|void} [cid] + * @property {boolean} [pin] + * @property {boolean} [preload] + * @property {number} [timeout] + * @property {AbortSignal} [signal] + * + * @param {PutDag} query * @returns {Promise} */ async put (query) { @@ -148,5 +146,3 @@ const decodePathOrCID = (input) => { * @param {EncodedDAGNode} value * @returns {DAGNode} */ - -exports.DAGService = DAGService diff --git a/packages/ipfs-message-port-server/src/files.js b/packages/ipfs-message-port-server/src/files.js index e88a724d2a..f1084dcd5b 100644 --- a/packages/ipfs-message-port-server/src/files.js +++ b/packages/ipfs-message-port-server/src/files.js @@ -14,10 +14,7 @@ const { encodeCID } = require('ipfs-message-port-protocol/src/cid') * @typedef {Stat} EncodedStat */ -/** - * @class - */ -class FilesService { +exports.FilesService = class FilesService { /** * * @param {IPFS} ipfs @@ -59,4 +56,3 @@ class FilesService { return { stat: { ...stat, cid: encodeCID(stat.cid, transfer) }, transfer } } } -exports.FilesService = FilesService diff --git a/packages/ipfs-message-port-server/src/ipfs.ts b/packages/ipfs-message-port-server/src/ipfs.ts index f8538553a6..a9d23efd5d 100644 --- a/packages/ipfs-message-port-server/src/ipfs.ts +++ b/packages/ipfs-message-port-server/src/ipfs.ts @@ -21,12 +21,12 @@ export interface IPFSFactory { create(): Promise } -interface AbortOptions { +export interface AbortOptions { timeout?: number signal?: AbortSignal } -interface PutOptions extends AbortOptions { +export interface PutOptions extends AbortOptions { format?: string | void hashAlg?: string | void cid?: CID | void @@ -34,16 +34,16 @@ interface PutOptions extends AbortOptions { pin?: boolean } -interface GetOptions extends AbortOptions { +export interface GetOptions extends AbortOptions { path?: string, localResolve?: boolean } -interface ResolveOptions extends AbortOptions { +export interface ResolveOptions extends AbortOptions { path?: string } -interface TreeOptions extends AbortOptions { +export interface TreeOptions extends AbortOptions { path?: string, recursive?: boolean } @@ -58,12 +58,12 @@ export interface DAG { 
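// --- Illustrative note (not part of the patch above) ------------------------
// `moveBuffer` above pushes the view's underlying ArrayBuffer onto the
// transfer list so `postMessage` moves it instead of structured-cloning it.
// A standalone browser sketch of that effect:
/* eslint-env browser */
'use strict'

const { port1, port2 } = new MessageChannel()
port2.onmessage = event => console.log(event.data.bytes.byteLength) // 3

const bytes = new Uint8Array([1, 2, 3])
port1.postMessage({ bytes }, [bytes.buffer]) // transferred, not copied

// After the transfer the sender's view is detached:
console.log(bytes.byteLength) // 0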
export interface Core { addAll(inputs: AddAllInput, options: AddOptions): AsyncIterable add(input: AddInput, options: AddOptions): Promise - cat(ipfsPath: CID | string, options: CatOptions): AsyncIterable + cat(ipfsPath: CID | string, options: CatOptions): AsyncIterable ls(ipfsPath: CID | string, options: CoreLsOptions): AsyncIterable } -interface AddOptions extends AbortOptions { +export interface AddOptions extends AbortOptions { chunker?: string cidVersion?: number enableShardingExperiment?: boolean @@ -92,7 +92,7 @@ export type FileOutput = { size: number } -interface CatOptions extends AbortOptions { +export interface CatOptions extends AbortOptions { offset?: number length?: number } @@ -116,7 +116,7 @@ export interface Files { stat(path: string, options?: StatOptions): Promise } -interface ChmodOptions extends AbortOptions { +export interface ChmodOptions extends AbortOptions { recursive?: boolean flush?: boolean hashAlg?: string @@ -138,13 +138,13 @@ export type LsEntry = { mtime?: UnixFSTime } -interface StatOptions extends AbortOptions { +export interface StatOptions extends AbortOptions { hash?: boolean size?: boolean withLocal?: boolean } -type Stat = { +export type Stat = { cid: CID size: number cumulativeSize: number @@ -155,14 +155,14 @@ type Stat = { sizeLocal: number } -type WriteContent = +export type WriteContent = | string | ArrayBufferView | ArrayBuffer | Blob | AsyncIterable -type AddInput = +export type AddInput = | Blob | string | ArrayBufferView @@ -170,7 +170,7 @@ type AddInput = | FileInput | ReadStream -type AddAllInput = +export type AddAllInput = | Iterable | AsyncIterable @@ -189,7 +189,7 @@ export type FileContent = | Iterable | AsyncIterable -interface WriteOptions extends AbortOptions { +export interface WriteOptions extends AbortOptions { offset?: number length?: number create?: boolean @@ -203,20 +203,20 @@ interface WriteOptions extends AbortOptions { cidVersion?: CIDVersion } -type WriteResult = { +export type WriteResult = { cid: CID size: number } -interface Block { +export interface Block { cid: CID - data: Buffer + data: Uint8Array } -interface BlockService { +export interface BlockService { get(cid: CID, options?: GetBlockOptions): Promise put(block: Block, options?: PutBlockOptions): Promise - put(buffer: Buffer, options?: PutBufferOptions): Promise + put(buffer: Uint8Array, options?: PutBufferOptions): Promise rm( cid: CID | CID[], options?: RmBlockOptions @@ -227,21 +227,21 @@ interface BlockService { ): Promise<{ cid: CID; size: number }> } -interface GetBlockOptions extends AbortOptions { } // eslint-disable-line @typescript-eslint/no-empty-interface -interface PutBlockOptions extends AbortOptions { +export interface GetBlockOptions extends AbortOptions { } // eslint-disable-line @typescript-eslint/no-empty-interface +export interface PutBlockOptions extends AbortOptions { format?: string mhtype?: string mhlen?: number version?: number pin?: boolean } -interface PutBufferOptions extends PutBlockOptions { +export interface PutBufferOptions extends PutBlockOptions { cid?: EncodedCID | void } -interface RmBlockOptions extends AbortOptions { +export interface RmBlockOptions extends AbortOptions { force?: boolean quiet?: boolean } -interface StatBlockOptions extends AbortOptions { } // eslint-disable-line @typescript-eslint/no-empty-interface +export interface StatBlockOptions extends AbortOptions { } // eslint-disable-line @typescript-eslint/no-empty-interface diff --git a/packages/ipfs-message-port-server/src/server.js 
b/packages/ipfs-message-port-server/src/server.js index 12e9bf42a2..87f0ccc619 100644 --- a/packages/ipfs-message-port-server/src/server.js +++ b/packages/ipfs-message-port-server/src/server.js @@ -91,7 +91,7 @@ const { encodeError } = require('ipfs-message-port-protocol/src/error') * @extends {ServiceQuery} */ -class Query { +const Query = class Query { /** * @param {Namespace} namespace * @param {Method} method @@ -118,6 +118,7 @@ class Query { this.fail(new AbortError()) } } +exports.Query = Query /** * @template T @@ -130,7 +131,7 @@ class Query { * @template T */ -class Server { +exports.Server = class Server { /** * @param {MultiService} services */ @@ -262,7 +263,7 @@ class Server { } } -class UnsupportedMessageError extends RangeError { +const UnsupportedMessageError = class UnsupportedMessageError extends RangeError { /** * @param {MessageEvent} event */ @@ -275,13 +276,11 @@ class UnsupportedMessageError extends RangeError { return this.constructor.name } } +exports.UnsupportedMessageError = UnsupportedMessageError -class AbortError extends Error { +const AbortError = class AbortError extends Error { get name () { return this.constructor.name } } - -exports.Query = Query -exports.Server = Server exports.AbortError = AbortError diff --git a/packages/ipfs-message-port-server/src/service.js b/packages/ipfs-message-port-server/src/service.js index c4820acfce..5c48f3836b 100644 --- a/packages/ipfs-message-port-server/src/service.js +++ b/packages/ipfs-message-port-server/src/service.js @@ -11,7 +11,7 @@ const { BlockService } = require('./block') * @typedef {import('./ipfs').IPFS} IPFS */ -class IPFSService { +exports.IPFSService = class IPFSService { /** * * @param {IPFS} ipfs @@ -23,5 +23,3 @@ class IPFSService { this.block = new BlockService(ipfs) } } - -exports.IPFSService = IPFSService diff --git a/packages/ipfs-message-port-server/tsconfig.json b/packages/ipfs-message-port-server/tsconfig.json index ad80c7d3cd..a7d52a397b 100644 --- a/packages/ipfs-message-port-server/tsconfig.json +++ b/packages/ipfs-message-port-server/tsconfig.json @@ -1,30 +1,15 @@ { + "extends": "../../tsconfig.json", "compilerOptions": { - "allowJs": true, - "checkJs": true, - "forceConsistentCasingInFileNames": true, - "noImplicitReturns": true, - "noImplicitAny": true, - "noImplicitThis": true, - "noFallthroughCasesInSwitch": true, - "noUnusedLocals": true, - "noUnusedParameters": true, - "strictFunctionTypes": true, - "strictNullChecks": true, - "strictPropertyInitialization": true, - "strict": true, - "alwaysStrict": true, - "esModuleInterop": true, - "target": "ES5", - "noEmit": true + "outDir": "dist" }, - "exclude": [ - "dist", - "node_modules/**" - ], "include": [ - "src/**/*.js", - "../ipfs-message-port-protocol/src/**/*.js" + "src", + "package.json" ], - "compileOnSave": false -} \ No newline at end of file + "references": [ + { + "path": "../ipfs-message-port-protocol" + } + ] +} diff --git a/packages/ipfs/.eslintrc b/packages/ipfs/.eslintrc index 88e5a82ccb..ea565dd639 100644 --- a/packages/ipfs/.eslintrc +++ b/packages/ipfs/.eslintrc @@ -1,3 +1,3 @@ { "extends": "ipfs" -} \ No newline at end of file +} diff --git a/packages/ipfs/package.json b/packages/ipfs/package.json index 36d914e537..5a7448a6b3 100644 --- a/packages/ipfs/package.json +++ b/packages/ipfs/package.json @@ -23,7 +23,9 @@ }, "scripts": { "lint": "aegir lint", - "build": "aegir build", + "build": "npm run build:bundle && npm run build:types", + "build:bundle": "aegir build", + "build:types": "tsc --build", "test": "echo 'Only 
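// --- Illustrative sketch (not part of the patch above) ----------------------
// How the exports above are expected to be composed on the worker side. The
// require paths mirror the files in this changeset; how an incoming
// MessagePort is handed to the Server is not shown in this diff, so that part
// is only hinted at in a comment.
/* eslint-env worker */
'use strict'

const IPFS = require('ipfs-core')
const { IPFSService } = require('ipfs-message-port-server/src/service')
const { Server } = require('ipfs-message-port-server/src/server')

const main = async () => {
  const ipfs = await IPFS.create()
  // IPFSService wires the core/files/dag/block sub-services around one node.
  const service = new IPFSService(ipfs)
  // Server dispatches incoming queries to those services.
  const server = new Server(service)

  // Ports arriving from connecting tabs would be handed to `server` here
  // (the connection API lives elsewhere in ipfs-message-port-server).
  return server
}

module.exports = main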
interface tests live here'", "test:interface:core": "aegir test -f test/interface-core.js", "test:interface:http-js": "aegir test -f test/interface-http-js.js", @@ -43,7 +45,7 @@ "update-notifier": "^5.0.0" }, "devDependencies": { - "aegir": "^27.0.0", + "aegir": "^28.0.0", "clear-module": "^4.0.0", "cross-env": "^7.0.0", "delay": "^4.4.0", @@ -68,7 +70,15 @@ "stream-to-promise": "^3.0.0", "string-argv": "^0.3.1", "temp-write": "^4.0.0", - "wrtc": "^0.4.6" + "wrtc": "^0.4.6", + "typescript": "^4.0.3" + }, + "typesVersions": { + "*": { + "*": [ + "dist/*" + ] + } }, "browser": { "./src/cli.js": false, diff --git a/packages/ipfs/src/index.js b/packages/ipfs/src/index.js index a535d37d68..0891575466 100644 --- a/packages/ipfs/src/index.js +++ b/packages/ipfs/src/index.js @@ -1,3 +1,4 @@ +/* eslint-disable jsdoc/valid-types */ 'use strict' const IPFS = require('ipfs-core') diff --git a/packages/ipfs/test/interface-core.js b/packages/ipfs/test/interface-core.js index 9be13fd5f8..84331ccd49 100644 --- a/packages/ipfs/test/interface-core.js +++ b/packages/ipfs/test/interface-core.js @@ -48,7 +48,14 @@ describe('interface-ipfs-core tests', function () { tests.key(commonFactory) - tests.miscellaneous(commonFactory) + tests.miscellaneous(commonFactory, { + skip: [ + { + name: 'should include the ipfs-http-client version', + reason: 'Value is added by the http client and the client is not part of ipfs-core' + } + ] + }) tests.name(factory({ ipfsOptions: { diff --git a/packages/ipfs/test/interface-http-go.js b/packages/ipfs/test/interface-http-go.js index 3fe79cf675..98dff02fc7 100644 --- a/packages/ipfs/test/interface-http-go.js +++ b/packages/ipfs/test/interface-http-go.js @@ -460,10 +460,6 @@ describe('interface-ipfs-core over ipfs-http-client tests against go-ipfs', () = tests.miscellaneous(commonFactory, { skip: [ - { - name: 'should include the ipfs-http-client version', - reason: 'TODO not implemented in go-ipfs yet' - }, { name: 'should include the interface-ipfs-core version', reason: 'TODO not implemented in go-ipfs yet' @@ -551,7 +547,7 @@ describe('interface-ipfs-core over ipfs-http-client tests against go-ipfs', () = reason: 'FIXME go-ipfs throws invalid encoding: base58' }, { - name: 'should put a Protobuf encoded Buffer', + name: 'should put a Protobuf encoded Uint8Array', reason: 'FIXME go-ipfs throws invalid encoding: protobuf' } ] diff --git a/packages/ipfs/tsconfig.json b/packages/ipfs/tsconfig.json new file mode 100644 index 0000000000..76a587cd96 --- /dev/null +++ b/packages/ipfs/tsconfig.json @@ -0,0 +1,18 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "dist" + }, + "include": [ + "package.json", + "./src" + ], + "references": [ + { + "path": "../ipfs-cli" + }, + { + "path": "../ipfs-core" + } + ] +} diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000000..61fe44e93d --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,71 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "forceConsistentCasingInFileNames": true, + "noImplicitReturns": false, + "noImplicitAny": false, + "noImplicitThis": true, + "noFallthroughCasesInSwitch": true, + "noUnusedLocals": true, + "noUnusedParameters": false, + "strictFunctionTypes": true, + "strictNullChecks": true, + "strictPropertyInitialization": true, + "strictBindCallApply": true, + "strict": true, + "alwaysStrict": true, + "esModuleInterop": true, + "target": "ES2018", + "moduleResolution": "node", + "declaration": true, + "declarationMap": true, + "skipLibCheck": true, + 
"stripInternal": true, + "resolveJsonModule": true, + "incremental": true, + "baseUrl": "packages", + "paths": { + "interface-ipfs-core/*": [ + "interface-ipfs-core/*" + ], + "ipfs/*": [ + "ipfs/*" + ], + "ipfs-cli/*": [ + "ipfs-cli/*" + ], + "ipfs-core/*": [ + "ipfs-core/*" + ], + "ipfs-core-utils/*": [ + "ipfs-core-utils/*" + ], + "ipfs-http-client/*": [ + "ipfs-http-client/*" + ], + "ipfs-http-gateway/*": [ + "ipfs-http-gateway/*" + ], + "ipfs-http-server/*": [ + "ipfs-http-server/*" + ], + "ipfs-message-port-protocol/*": [ + "ipfs-message-port-protocol/*" + ], + "ipfs-message-port-server/*": [ + "ipfs-message-port-server/*" + ] + }, + "composite": true, + "emitDeclarationOnly": true + }, + "exclude": [ + "dist", + "packages/*/dist", + "packages/*/test", + "packages/*/node_modules", + "node_modules" + ], + "compileOnSave": false +}