diff --git a/packages/upload-client/src/index.js b/packages/upload-client/src/index.js
index f8dbbd35d..f560afc81 100644
--- a/packages/upload-client/src/index.js
+++ b/packages/upload-client/src/index.js
@@ -43,12 +43,12 @@ export * as Receipt from './receipts.js'
  * The issuer needs the `blob/add`, `index/add`, `filecoin/offer` and
  * `upload/add` delegated capability.
  * @param {import('./types.js').BlobLike} file File data.
- * @param {import('./types.js').UploadOptions} [options]
+ * @param {import('./types.js').UploadFileOptions} [options]
  */
 export async function uploadFile(conf, file, options = {}) {
   return await uploadBlockStream(
     conf,
-    UnixFS.createFileEncoderStream(file),
+    UnixFS.createFileEncoderStream(file, options),
     options
   )
 }
diff --git a/packages/upload-client/src/types.ts b/packages/upload-client/src/types.ts
index 408e069df..7d49586e3 100644
--- a/packages/upload-client/src/types.ts
+++ b/packages/upload-client/src/types.ts
@@ -3,7 +3,7 @@ import type {
   ProgressStatus as XHRProgressStatus,
 } from 'ipfs-utils/src/types.js'
 import { Link, UnknownLink, Version, MultihashHasher } from 'multiformats'
-import { Block } from '@ipld/unixfs'
+import { Block, EncoderSettings } from '@ipld/unixfs'
 import {
   ServiceMethod,
   ConnectionView,
@@ -347,6 +347,13 @@ export interface UnixFSDirectoryEncoderOptions {
   onDirectoryEntryLink?: (link: DirectoryEntryLink) => void
 }
 
+export interface UnixFSEncoderSettingsOptions {
+  /**
+   * Settings for UnixFS encoding.
+   */
+  settings?: EncoderSettings
+}
+
 export interface ShardingOptions {
   /**
    * The target shard size. Actual size of CAR output may be bigger due to CAR
@@ -379,11 +386,19 @@ export interface UploadOptions
   pieceHasher?: MultihashHasher
 }
 
+export interface UploadFileOptions
+  extends UploadOptions,
+    UnixFSEncoderSettingsOptions {}
+
 export interface UploadDirectoryOptions
   extends UploadOptions,
-    UnixFSDirectoryEncoderOptions,
-    UploadProgressTrackable {
-  /** whether the directory files have already been ordered in a custom way. indicates that the upload must not use a different order than the one provided. */
+    UnixFSEncoderSettingsOptions,
+    UnixFSDirectoryEncoderOptions {
+  /**
+   * Whether the directory files have already been ordered in a custom way.
+   * Indicates that the upload must not use a different order than the one
+   * provided.
+   */
   customOrder?: boolean
 }
 
diff --git a/packages/upload-client/src/unixfs.js b/packages/upload-client/src/unixfs.js
index 3547ba71f..5bb902854 100644
--- a/packages/upload-client/src/unixfs.js
+++ b/packages/upload-client/src/unixfs.js
@@ -6,7 +6,7 @@ import { withWidth } from '@ipld/unixfs/file/layout/balanced'
 const SHARD_THRESHOLD = 1000 // shard directory after > 1,000 items
 const queuingStrategy = UnixFS.withCapacity()
 
-const settings = UnixFS.configure({
+const defaultSettings = UnixFS.configure({
   fileChunkEncoder: raw,
   smallFileEncoder: raw,
   chunker: withMaxChunkSize(1024 * 1024),
@@ -15,10 +15,11 @@ const settings = UnixFS.configure({
 
 /**
  * @param {import('./types.js').BlobLike} blob
+ * @param {import('./types.js').UnixFSEncoderSettingsOptions} [options]
  * @returns {Promise}
  */
-export async function encodeFile(blob) {
-  const readable = createFileEncoderStream(blob)
+export async function encodeFile(blob, options) {
+  const readable = createFileEncoderStream(blob, options)
   const blocks = await collect(readable)
   // @ts-expect-error There is always a root block
   return { cid: blocks.at(-1).cid, blocks }
@@ -26,11 +27,13 @@
 
 /**
  * @param {import('./types.js').BlobLike} blob
+ * @param {import('./types.js').UnixFSEncoderSettingsOptions} [options]
  * @returns {ReadableStream}
  */
-export function createFileEncoderStream(blob) {
+export function createFileEncoderStream(blob, options) {
   /** @type {TransformStream} */
   const { readable, writable } = new TransformStream({}, queuingStrategy)
+  const settings = options?.settings ?? defaultSettings
   const unixfsWriter = UnixFS.createWriter({ writable, settings })
   const fileBuilder = new UnixFSFileBuilder('', blob)
   void (async () => {
@@ -101,7 +104,7 @@ class UnixFSDirectoryBuilder {
 
 /**
  * @param {Iterable} files
- * @param {import('./types.js').UnixFSDirectoryEncoderOptions} [options]
+ * @param {import('./types.js').UnixFSEncoderSettingsOptions & import('./types.js').UnixFSDirectoryEncoderOptions} [options]
  * @returns {Promise}
  */
 export async function encodeDirectory(files, options) {
@@ -113,7 +116,7 @@ export async function encodeDirectory(files, options) {
 
 /**
  * @param {Iterable} files
- * @param {import('./types.js').UnixFSDirectoryEncoderOptions} [options]
+ * @param {import('./types.js').UnixFSEncoderSettingsOptions & import('./types.js').UnixFSDirectoryEncoderOptions} [options]
  * @returns {ReadableStream}
  */
 export function createDirectoryEncoderStream(files, options) {
@@ -145,6 +148,7 @@ export function createDirectoryEncoderStream(files, options) {
 
   /** @type {TransformStream} */
   const { readable, writable } = new TransformStream({}, queuingStrategy)
+  const settings = options?.settings ?? defaultSettings
   const unixfsWriter = UnixFS.createWriter({ writable, settings })
   void (async () => {
     const link = await rootDir.finalize(unixfsWriter)
diff --git a/packages/upload-client/test/unixfs.test.js b/packages/upload-client/test/unixfs.test.js
index 2a45be0ad..d8da41945 100644
--- a/packages/upload-client/test/unixfs.test.js
+++ b/packages/upload-client/test/unixfs.test.js
@@ -1,9 +1,10 @@
 import assert from 'assert'
-import { decode, NodeType } from '@ipld/unixfs'
+import { decode, NodeType, defaults } from '@ipld/unixfs'
 import { exporter } from 'ipfs-unixfs-exporter'
 // @ts-expect-error this version of blockstore-core doesn't point to correct types file in package.json, and upgrading to latest version that fixes that leads to api changes
 import { MemoryBlockstore } from 'blockstore-core/memory'
 import * as raw from 'multiformats/codecs/raw'
+import * as Link from 'multiformats/link'
 import path from 'path'
 import { encodeFile, encodeDirectory } from '../src/unixfs.js'
 import { File } from './helpers/shims.js'
@@ -105,6 +106,20 @@ describe('UnixFS', () => {
     assert.equal(cid.code, raw.code)
   })
 
+  it('configured to output v0 CIDs', async () => {
+    const file = new Blob(['test'])
+    const { cid } = await encodeFile(file, {
+      settings: {
+        ...defaults(),
+        linker: {
+          // @ts-expect-error
+          createLink: (_, digest) => Link.createLegacy(digest),
+        },
+      },
+    })
+    assert.equal(cid.version, 0)
+  })
+
   it('callback for each directory entry link', async () => {
     const files = [
       new File(['file'], 'file.txt'),
diff --git a/packages/w3up-client/src/client.js b/packages/w3up-client/src/client.js
index f716f0a1c..08027fc69 100644
--- a/packages/w3up-client/src/client.js
+++ b/packages/w3up-client/src/client.js
@@ -116,7 +116,7 @@ export class Client extends Base {
    * - `upload/add`
    *
    * @param {import('./types.js').BlobLike} file - File data.
-   * @param {import('./types.js').UploadOptions} [options]
+   * @param {import('./types.js').UploadFileOptions} [options]
    */
   async uploadFile(file, options = {}) {
     const conf = await this._invocationConfig([
diff --git a/packages/w3up-client/src/types.ts b/packages/w3up-client/src/types.ts
index 7b92f9cd7..1480715fa 100644
--- a/packages/w3up-client/src/types.ts
+++ b/packages/w3up-client/src/types.ts
@@ -155,6 +155,7 @@ export type {
   ShardingOptions,
   ShardStoringOptions,
   UploadOptions,
+  UploadFileOptions,
   UploadDirectoryOptions,
   FileLike,
   BlobLike,
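Usage sketch (illustrative only, not part of the diff): with the new UploadFileOptions, a caller can pass custom UnixFS encoder settings through uploadFile, mirroring the v0-CID test added in unixfs.test.js above. The `client` instance below is an assumption, standing in for an already-configured w3up-client with the required delegations; everything else uses APIs shown in the diff.

// Hypothetical usage, assuming `client` is a configured w3up-client instance
// holding `blob/add`, `index/add`, `filecoin/offer` and `upload/add` delegations.
import { defaults } from '@ipld/unixfs'
import * as Link from 'multiformats/link'

const file = new Blob(['hello world'])

// Override the default UnixFS encoder settings via the new `settings` option.
// As in the new test, the linker is swapped to emit legacy (CIDv0) links.
const root = await client.uploadFile(file, {
  settings: {
    ...defaults(),
    linker: {
      // @ts-expect-error createLink's type differs from EncoderSettings
      createLink: (_, digest) => Link.createLegacy(digest),
    },
  },
})

console.log(root.toString()) // root CID of the uploaded file

When no `settings` option is given, the behaviour is unchanged: the encoders fall back to the module-level defaultSettings, as the `options?.settings ?? defaultSettings` lines in unixfs.js show.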