From dde1580e61756a8df7ff7c450e44d8b089eda7b8 Mon Sep 17 00:00:00 2001 From: jtsmedley <38006759+jtsmedley@users.noreply.github.com> Date: Mon, 1 Aug 2022 14:58:17 -0500 Subject: [PATCH 1/3] Cid version support dist (#1) * Adds support for setting the CID version * Distribution Version for Temporary Support --- .gitignore | 1 - dist/cjs/blockstore/fs.js | 76 ++++++++++ dist/cjs/blockstore/idb.js | 65 +++++++++ dist/cjs/blockstore/index.js | 2 + dist/cjs/blockstore/memory.js | 35 +++++ dist/cjs/cli/cli.js | 131 ++++++++++++++++++ dist/cjs/cli/lib.js | 34 +++++ dist/cjs/pack/blob.js | 32 +++++ dist/cjs/pack/constants.js | 13 ++ dist/cjs/pack/fs.js | 39 ++++++ dist/cjs/pack/index.js | 67 +++++++++ dist/cjs/pack/stream.js | 72 ++++++++++ dist/cjs/pack/utils/normalise-input.js | 28 ++++ dist/cjs/unpack/fs.js | 51 +++++++ dist/cjs/unpack/index.js | 47 +++++++ .../utils/verifying-get-only-blockstore.js | 39 ++++++ dist/esm/blockstore/fs.js | 69 +++++++++ dist/esm/blockstore/idb.js | 42 ++++++ dist/esm/blockstore/index.js | 1 + dist/esm/blockstore/memory.js | 31 +++++ dist/esm/cli/cli.js | 126 +++++++++++++++++ dist/esm/cli/lib.js | 25 ++++ dist/esm/pack/blob.js | 25 ++++ dist/esm/pack/constants.js | 10 ++ dist/esm/pack/fs.js | 32 +++++ dist/esm/pack/index.js | 60 ++++++++ dist/esm/pack/stream.js | 65 +++++++++ dist/esm/pack/utils/normalise-input.js | 24 ++++ dist/esm/package.json | 1 + dist/esm/unpack/fs.js | 42 ++++++ dist/esm/unpack/index.js | 39 ++++++ .../utils/verifying-get-only-blockstore.js | 35 +++++ dist/types/blockstore/fs.d.ts | 19 +++ dist/types/blockstore/idb.d.ts | 21 +++ dist/types/blockstore/index.d.ts | 5 + dist/types/blockstore/memory.d.ts | 15 ++ dist/types/cli/cli.d.ts | 2 + dist/types/cli/lib.d.ts | 9 ++ dist/types/pack/blob.d.ts | 7 + dist/types/pack/constants.d.ts | 2 + dist/types/pack/fs.d.ts | 9 ++ dist/types/pack/index.d.ts | 22 +++ dist/types/pack/stream.d.ts | 10 ++ dist/types/pack/utils/normalise-input.d.ts | 5 + dist/types/unpack/fs.d.ts | 15 ++ dist/types/unpack/index.d.ts | 10 ++ .../utils/verifying-get-only-blockstore.d.ts | 15 ++ src/pack/index.ts | 7 +- src/pack/stream.ts | 5 +- 49 files changed, 1532 insertions(+), 5 deletions(-) create mode 100644 dist/cjs/blockstore/fs.js create mode 100644 dist/cjs/blockstore/idb.js create mode 100644 dist/cjs/blockstore/index.js create mode 100644 dist/cjs/blockstore/memory.js create mode 100644 dist/cjs/cli/cli.js create mode 100644 dist/cjs/cli/lib.js create mode 100644 dist/cjs/pack/blob.js create mode 100644 dist/cjs/pack/constants.js create mode 100644 dist/cjs/pack/fs.js create mode 100644 dist/cjs/pack/index.js create mode 100644 dist/cjs/pack/stream.js create mode 100644 dist/cjs/pack/utils/normalise-input.js create mode 100644 dist/cjs/unpack/fs.js create mode 100644 dist/cjs/unpack/index.js create mode 100644 dist/cjs/unpack/utils/verifying-get-only-blockstore.js create mode 100644 dist/esm/blockstore/fs.js create mode 100644 dist/esm/blockstore/idb.js create mode 100644 dist/esm/blockstore/index.js create mode 100644 dist/esm/blockstore/memory.js create mode 100644 dist/esm/cli/cli.js create mode 100644 dist/esm/cli/lib.js create mode 100644 dist/esm/pack/blob.js create mode 100644 dist/esm/pack/constants.js create mode 100644 dist/esm/pack/fs.js create mode 100644 dist/esm/pack/index.js create mode 100644 dist/esm/pack/stream.js create mode 100644 dist/esm/pack/utils/normalise-input.js create mode 100644 dist/esm/package.json create mode 100644 dist/esm/unpack/fs.js create mode 100644 dist/esm/unpack/index.js 
create mode 100644 dist/esm/unpack/utils/verifying-get-only-blockstore.js create mode 100644 dist/types/blockstore/fs.d.ts create mode 100644 dist/types/blockstore/idb.d.ts create mode 100644 dist/types/blockstore/index.d.ts create mode 100644 dist/types/blockstore/memory.d.ts create mode 100644 dist/types/cli/cli.d.ts create mode 100644 dist/types/cli/lib.d.ts create mode 100644 dist/types/pack/blob.d.ts create mode 100644 dist/types/pack/constants.d.ts create mode 100644 dist/types/pack/fs.d.ts create mode 100644 dist/types/pack/index.d.ts create mode 100644 dist/types/pack/stream.d.ts create mode 100644 dist/types/pack/utils/normalise-input.d.ts create mode 100644 dist/types/unpack/fs.d.ts create mode 100644 dist/types/unpack/index.d.ts create mode 100644 dist/types/unpack/utils/verifying-get-only-blockstore.d.ts diff --git a/.gitignore b/.gitignore index 6704566..adb2c19 100644 --- a/.gitignore +++ b/.gitignore @@ -80,7 +80,6 @@ typings/ # Nuxt.js build / generate output .nuxt -dist # Gatsby files .cache/ diff --git a/dist/cjs/blockstore/fs.js b/dist/cjs/blockstore/fs.js new file mode 100644 index 0000000..e6d1d3d --- /dev/null +++ b/dist/cjs/blockstore/fs.js @@ -0,0 +1,76 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FsBlockStore = void 0; +const fs_1 = __importDefault(require("fs")); +const os_1 = __importDefault(require("os")); +const multiformats_1 = require("multiformats"); +const blockstore_core_1 = require("blockstore-core"); +class FsBlockStore extends blockstore_core_1.BaseBlockstore { + constructor() { + super(); + this.path = `${os_1.default.tmpdir()}/${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}`; + this._opened = false; + } + async _open() { + if (this._opening) { + await this._opening; + } + else { + this._opening = fs_1.default.promises.mkdir(this.path); + await this._opening; + this._opened = true; + } + } + async put(cid, bytes) { + if (!this._opened) { + await this._open(); + } + const cidStr = cid.toString(); + const location = `${this.path}/${cidStr}`; + await fs_1.default.promises.writeFile(location, bytes); + } + async get(cid) { + if (!this._opened) { + await this._open(); + } + const cidStr = cid.toString(); + const location = `${this.path}/${cidStr}`; + const bytes = await fs_1.default.promises.readFile(location); + return bytes; + } + async has(cid) { + if (!this._opened) { + await this._open(); + } + const cidStr = cid.toString(); + const location = `${this.path}/${cidStr}`; + try { + await fs_1.default.promises.access(location); + return true; + } + catch (err) { + return false; + } + } + async *blocks() { + if (!this._opened) { + await this._open(); + } + const cids = await fs_1.default.promises.readdir(this.path); + for (const cidStr of cids) { + const location = `${this.path}/${cidStr}`; + const bytes = await fs_1.default.promises.readFile(location); + yield { cid: multiformats_1.CID.parse(cidStr), bytes }; + } + } + async close() { + if (this._opened) { + await fs_1.default.promises.rm(this.path, { recursive: true }); + } + this._opened = false; + } +} +exports.FsBlockStore = FsBlockStore; diff --git a/dist/cjs/blockstore/idb.js b/dist/cjs/blockstore/idb.js new file mode 100644 index 0000000..50ea8fb --- /dev/null +++ b/dist/cjs/blockstore/idb.js @@ -0,0 +1,65 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create 
? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.IdbBlockStore = void 0; +const idb = __importStar(require("idb-keyval")); +const multiformats_1 = require("multiformats"); +const blockstore_core_1 = require("blockstore-core"); +/** + * Save blocks to IndexedDB in the browser via idb-keyval + * Creates a probably unique indexed db per instance to ensure that the + * blocks iteration method only returns blocks from this invocation, + * and so that the caller can destroy it without affecting others. + */ +class IdbBlockStore extends blockstore_core_1.BaseBlockstore { + constructor() { + super(); + const dbName = `IdbBlockStore-${Date.now()}-${Math.random()}`; + this.store = idb.createStore(dbName, `IdbBlockStore`); + } + async *blocks() { + const keys = await idb.keys(this.store); + for await (const key of keys) { + yield { + cid: multiformats_1.CID.parse(key.toString()), + bytes: await idb.get(key, this.store) + }; + } + } + async put(cid, bytes) { + await idb.set(cid.toString(), bytes, this.store); + } + async get(cid) { + const bytes = await idb.get(cid.toString(), this.store); + if (!bytes) { + throw new Error(`block with cid ${cid.toString()} not found`); + } + return bytes; + } + async has(cid) { + const bytes = await idb.get(cid.toString(), this.store); + return Boolean(bytes); + } + async close() { + return idb.clear(this.store); + } +} +exports.IdbBlockStore = IdbBlockStore; diff --git a/dist/cjs/blockstore/index.js b/dist/cjs/blockstore/index.js new file mode 100644 index 0000000..ce03781 --- /dev/null +++ b/dist/cjs/blockstore/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/dist/cjs/blockstore/memory.js b/dist/cjs/blockstore/memory.js new file mode 100644 index 0000000..63e3a2a --- /dev/null +++ b/dist/cjs/blockstore/memory.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MemoryBlockStore = void 0; +const multiformats_1 = require("multiformats"); +const blockstore_core_1 = require("blockstore-core"); +class MemoryBlockStore extends blockstore_core_1.BaseBlockstore { + constructor() { + super(); + this.store = new Map(); + } + async *blocks() { + for (const [cidStr, bytes] of this.store.entries()) { + yield { cid: multiformats_1.CID.parse(cidStr), bytes }; + } + } + put(cid, bytes) { + this.store.set(cid.toString(), bytes); + return Promise.resolve(); + } + get(cid) { + const bytes = this.store.get(cid.toString()); + if (!bytes) { + throw new Error(`block with cid ${cid.toString()} not found`); + } + return Promise.resolve(bytes); + } + has(cid) { + return Promise.resolve(this.store.has(cid.toString())); + } + close() { + this.store.clear(); + return Promise.resolve(); + } +}
+exports.MemoryBlockStore = MemoryBlockStore; diff --git a/dist/cjs/cli/cli.js b/dist/cjs/cli/cli.js new file mode 100644 index 0000000..549d27f --- /dev/null +++ b/dist/cjs/cli/cli.js @@ -0,0 +1,131 @@ +#!/usr/bin/env node +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const meow_1 = __importDefault(require("meow")); +const multiformats_1 = require("multiformats"); +const fs_1 = require("../pack/fs"); +const fs_2 = require("../unpack/fs"); +const lib_1 = require("./lib"); +const options = { + flags: { + output: { + type: 'string', + alias: 'o', + }, + pack: { + type: 'string', + alias: 'p' + }, + unpack: { + type: 'string', + alias: 'u', + }, + root: { + type: 'string', + isMultiple: true + }, + list: { + type: 'string', + alias: 'l', + }, + listCids: { + type: 'string' + }, + listRoots: { + type: 'string' + }, + wrapWithDirectory: { + type: 'boolean', + alias: 'w', + default: true + } + } +}; +const cli = (0, meow_1.default)(` + Content Addressable archives (CAR) store IPLD block data as a sequence of bytes; + typically in a file with a .car extension. The CAR format is a serialized + representation of any IPLD DAG (graph) as the concatenation of its blocks, plus + a header that describes the graphs in the file (via root CIDs). + + See: https://github.com/ipld/specs/blob/master/block-layer/content-addressable-archives.md + + Packing files into a .car + + # write a content addressed archive to the current working dir. + $ ipfs-car --pack path/to/file/or/dir + + # specify the car file name. + $ ipfs-car --pack path/to/files --output path/to/write/a.car + + # pack files without wrapping with top-level directory + $ ipfs-car --wrapWithDirectory false --pack path/to/files --output path/to/write/a.car + + Unpacking files from a .car + + # write 1 or more files to the current working dir. + $ ipfs-car --unpack path/to/my.car + + # unpack files to a specific path. + $ ipfs-car --unpack path/to/my.car --output /path/to/unpack/files/to + + # unpack specific roots + $ ipfs-car --unpack path/to/my.car --root <cid1> [--root <cid2>] + + # unpack files from a .car on stdin. + $ cat path/to/my.car | ipfs-car --unpack + + Listing the contents of a .car + + # list the cids for all the blocks. + $ ipfs-car --list-cids path/to/my.car + + # list the cid roots. + $ ipfs-car --list-roots path/to/my.car + + # list the files.
+ $ ipfs-car --list path/to/my.car + + TL;DR + --pack <path> --output <my.car> + --unpack <my.car> --output <path> + +`, options); +async function handleInput({ flags }) { + if (flags.pack) { + const { root, filename } = await (0, fs_1.packToFs)({ input: flags.pack, output: flags.output, wrapWithDirectory: flags.wrapWithDirectory }); + // tslint:disable-next-line: no-console + console.log(`root CID: ${root.toString()}`); + // tslint:disable-next-line: no-console + console.log(` output: ${filename}`); + } + else if (flags.unpack !== undefined) { + const roots = (flags.root || []).map(r => multiformats_1.CID.parse(r)); + if (flags.unpack === '') { + return (0, fs_2.unpackStreamToFs)({ input: process.stdin, roots, output: flags.output }); + } + return (0, fs_2.unpackToFs)({ input: flags.unpack, roots, output: flags.output }); + } + else if (flags.list) { + return (0, lib_1.listFilesInCar)({ input: flags.list }); + } + else if (flags.listRoots) { + return (0, lib_1.listRootsInCar)({ input: flags.listRoots }); + } + else if (flags.listCids) { + return (0, lib_1.listCidsInCar)({ input: flags.listCids }); + } + else if (!process.stdin.isTTY) { + // maybe stream? + // tslint:disable-next-line: no-console + console.log('Reading .car from stdin'); + return (0, fs_2.unpackStreamToFs)({ input: process.stdin, output: flags.output }); + } + else { + cli.showHelp(); + throw new Error('--pack or --unpack flag required'); + } +} +handleInput(cli); diff --git a/dist/cjs/cli/lib.js b/dist/cjs/cli/lib.js new file mode 100644 index 0000000..a1429d9 --- /dev/null +++ b/dist/cjs/cli/lib.js @@ -0,0 +1,34 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.listRootsInCar = exports.listCidsInCar = exports.listFilesInCar = void 0; +const fs_1 = __importDefault(require("fs")); +const car_1 = require("@ipld/car"); +const unpack_1 = require("../unpack"); +async function listFilesInCar({ input }) { + const carReader = await car_1.CarIndexedReader.fromFile(input); + for await (const file of (0, unpack_1.unpack)(carReader)) { + // tslint:disable-next-line: no-console + console.log(file.path); + } +} +exports.listFilesInCar = listFilesInCar; +async function listCidsInCar({ input }) { + const carIterator = await car_1.CarCIDIterator.fromIterable(fs_1.default.createReadStream(input)); + for await (const cid of carIterator) { + // tslint:disable-next-line: no-console + console.log(cid.toString()); + } +} +exports.listCidsInCar = listCidsInCar; +async function listRootsInCar({ input }) { + const carIterator = await car_1.CarCIDIterator.fromIterable(fs_1.default.createReadStream(input)); + const roots = await carIterator.getRoots(); + for (const root of roots) { + // tslint:disable-next-line: no-console + console.log(root.toString()); + } +} +exports.listRootsInCar = listRootsInCar; diff --git a/dist/cjs/pack/blob.js b/dist/cjs/pack/blob.js new file mode 100644 index 0000000..9a32f44 --- /dev/null +++ b/dist/cjs/pack/blob.js @@ -0,0 +1,32 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ?
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.packToBlob = void 0; +const blob_1 = require("@web-std/blob"); +const it_all_1 = __importDefault(require("it-all")); +const memory_1 = require("../blockstore/memory"); +const index_1 = require("./index"); +async function packToBlob({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }) { + const blockstore = userBlockstore ? userBlockstore : new memory_1.MemoryBlockStore(); + const { root, out } = await (0, index_1.pack)({ + input, + blockstore, + hasher, + maxChunkSize, + maxChildrenPerNode, + wrapWithDirectory, + rawLeaves + }); + const carParts = await (0, it_all_1.default)(out); + if (!userBlockstore) { + await blockstore.close(); + } + const car = new blob_1.Blob(carParts, { + // https://www.iana.org/assignments/media-types/application/vnd.ipld.car + type: 'application/vnd.ipld.car', + }); + return { root, car }; +} +exports.packToBlob = packToBlob; diff --git a/dist/cjs/pack/constants.js b/dist/cjs/pack/constants.js new file mode 100644 index 0000000..2c69cc8 --- /dev/null +++ b/dist/cjs/pack/constants.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unixfsImporterOptionsDefault = void 0; +const sha2_1 = require("multiformats/hashes/sha2"); +exports.unixfsImporterOptionsDefault = { + cidVersion: 1, + chunker: 'fixed', + maxChunkSize: 262144, + hasher: sha2_1.sha256, + rawLeaves: true, + wrapWithDirectory: true, + maxChildrenPerNode: 174 +}; diff --git a/dist/cjs/pack/fs.js b/dist/cjs/pack/fs.js new file mode 100644 index 0000000..6f9c9b0 --- /dev/null +++ b/dist/cjs/pack/fs.js @@ -0,0 +1,39 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.packToFs = void 0; +const fs_1 = __importDefault(require("fs")); +const os_1 = __importDefault(require("os")); +const path_1 = __importDefault(require("path")); +const move_file_1 = __importDefault(require("move-file")); +const stream_1 = require("./stream"); +const fs_2 = require("../blockstore/fs"); +async function packToFs({ input, output, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }) { + const blockstore = userBlockstore ? userBlockstore : new fs_2.FsBlockStore(); + const location = output || `${os_1.default.tmpdir()}/${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}`; + const writable = fs_1.default.createWriteStream(location); + const { root } = await (0, stream_1.packToStream)({ + input, + writable, + blockstore, + hasher, + maxChunkSize, + maxChildrenPerNode, + wrapWithDirectory, + rawLeaves + }); + if (!userBlockstore) { + await blockstore.close(); + } + // Move to work dir + if (!output) { + const basename = typeof input === 'string' ? 
path_1.default.parse(path_1.default.basename(input)).name : root.toString(); + const filename = `${basename}.car`; + await (0, move_file_1.default)(location, `${process.cwd()}/${filename}`); + return { root, filename }; + } + return { root, filename: output }; +} +exports.packToFs = packToFs; diff --git a/dist/cjs/pack/index.js b/dist/cjs/pack/index.js new file mode 100644 index 0000000..c1f7b6b --- /dev/null +++ b/dist/cjs/pack/index.js @@ -0,0 +1,67 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.pack = void 0; +const it_last_1 = __importDefault(require("it-last")); +const it_pipe_1 = __importDefault(require("it-pipe")); +const car_1 = require("@ipld/car"); +const ipfs_unixfs_importer_1 = require("ipfs-unixfs-importer"); +const normalise_input_1 = require("./utils/normalise-input"); +const memory_1 = require("../blockstore/memory"); +const constants_1 = require("./constants"); +async function pack({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }) { + if (!input || (Array.isArray(input) && !input.length)) { + throw new Error('missing input file(s)'); + } + const blockstore = userBlockstore ? userBlockstore : new memory_1.MemoryBlockStore(); + // Consume the source + const rootEntry = await (0, it_last_1.default)((0, it_pipe_1.default)((0, normalise_input_1.getNormaliser)(input), (source) => (0, ipfs_unixfs_importer_1.importer)(source, blockstore, { + ...constants_1.unixfsImporterOptionsDefault, + hasher: hasher || constants_1.unixfsImporterOptionsDefault.hasher, + maxChunkSize: maxChunkSize || constants_1.unixfsImporterOptionsDefault.maxChunkSize, + maxChildrenPerNode: maxChildrenPerNode || constants_1.unixfsImporterOptionsDefault.maxChildrenPerNode, + wrapWithDirectory: wrapWithDirectory === false ? false : constants_1.unixfsImporterOptionsDefault.wrapWithDirectory, + rawLeaves: rawLeaves == null ? 
constants_1.unixfsImporterOptionsDefault.rawLeaves : rawLeaves, + cidVersion: cidVersion == null ? constants_1.unixfsImporterOptionsDefault.cidVersion : cidVersion + }))); + if (!rootEntry || !rootEntry.cid) { + throw new Error('given input could not be parsed correctly'); + } + const root = rootEntry.cid; + const { writer, out: carOut } = await car_1.CarWriter.create([root]); + const carOutIter = carOut[Symbol.asyncIterator](); + let writingPromise; + const writeAll = async () => { + for await (const block of blockstore.blocks()) { + // `await` will block until all bytes in `carOut` are consumed by the user + // so we have backpressure here + await writer.put(block); + } + await writer.close(); + if (!userBlockstore) { + await blockstore.close(); + } + }; + const out = { + [Symbol.asyncIterator]() { + if (writingPromise != null) { + throw new Error('Multiple iterators not supported'); + } + // don't start writing until the user starts consuming the iterator + writingPromise = writeAll(); + return { + async next() { + const result = await carOutIter.next(); + if (result.done) { + await writingPromise; // any errors will propagate from here + } + return result; + } + }; + } + }; + return { root, out }; +} +exports.pack = pack; diff --git a/dist/cjs/pack/stream.js b/dist/cjs/pack/stream.js new file mode 100644 index 0000000..f704838 --- /dev/null +++ b/dist/cjs/pack/stream.js @@ -0,0 +1,72 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.packToStream = void 0; +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const stream_1 = require("stream"); +const it_last_1 = __importDefault(require("it-last")); +const it_pipe_1 = __importDefault(require("it-pipe")); +const car_1 = require("@ipld/car"); +const ipfs_unixfs_importer_1 = require("ipfs-unixfs-importer"); +const normalise_input_multiple_1 = require("ipfs-core-utils/files/normalise-input-multiple"); +const glob_source_js_1 = __importDefault(require("ipfs-utils/src/files/glob-source.js")); +const memory_1 = require("../blockstore/memory"); +const constants_1 = require("./constants"); +// Node version of toCar with Node Stream Writable +async function packToStream({ input, writable, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }) { + if (!input || (Array.isArray(input) && !input.length)) { + throw new Error('given input could not be parsed correctly'); + } + input = typeof input === 'string' ? [input] : input; + const blockstore = userBlockstore ? userBlockstore : new memory_1.MemoryBlockStore(); + // Consume the source + const rootEntry = await (0, it_last_1.default)((0, it_pipe_1.default)(legacyGlobSource(input), source => (0, normalise_input_multiple_1.normaliseInput)(source), (source) => (0, ipfs_unixfs_importer_1.importer)(source, blockstore, { + ...constants_1.unixfsImporterOptionsDefault, + hasher: hasher || constants_1.unixfsImporterOptionsDefault.hasher, + maxChunkSize: maxChunkSize || constants_1.unixfsImporterOptionsDefault.maxChunkSize, + maxChildrenPerNode: maxChildrenPerNode || constants_1.unixfsImporterOptionsDefault.maxChildrenPerNode, + wrapWithDirectory: wrapWithDirectory === false ? false : constants_1.unixfsImporterOptionsDefault.wrapWithDirectory, + rawLeaves: rawLeaves == null ?
constants_1.unixfsImporterOptionsDefault.rawLeaves : rawLeaves, + cidVersion: cidVersion == null ? constants_1.unixfsImporterOptionsDefault.cidVersion : cidVersion + }))); + if (!rootEntry || !rootEntry.cid) { + throw new Error('given input could not be parsed correctly'); + } + const root = rootEntry.cid; + const { writer, out } = await car_1.CarWriter.create([root]); + stream_1.Readable.from(out).pipe(writable); + for await (const block of blockstore.blocks()) { + await writer.put(block); + } + await writer.close(); + if (!userBlockstore) { + await blockstore.close(); + } + return { root }; +} +exports.packToStream = packToStream; +/** + * This function replicates the old behaviour of globSource to not introduce a + * breaking change. + * + * TODO: figure out what the breaking change will be. + */ +async function* legacyGlobSource(input) { + for await (const p of input) { + const resolvedPath = path_1.default.resolve(p); + const stat = await fs_1.default.promises.stat(resolvedPath); + const fileName = path_1.default.basename(resolvedPath); + if (stat.isDirectory()) { + yield { path: fileName }; + for await (const candidate of (0, glob_source_js_1.default)(resolvedPath, '**/*')) { + yield { ...candidate, path: path_1.default.join(fileName, candidate.path) }; + } + } + else { + yield { path: fileName, content: fs_1.default.createReadStream(resolvedPath) }; + } + } +} diff --git a/dist/cjs/pack/utils/normalise-input.js b/dist/cjs/pack/utils/normalise-input.js new file mode 100644 index 0000000..020dc53 --- /dev/null +++ b/dist/cjs/pack/utils/normalise-input.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getNormaliser = void 0; +const normalise_input_single_1 = require("ipfs-core-utils/files/normalise-input-single"); +const normalise_input_multiple_1 = require("ipfs-core-utils/files/normalise-input-multiple"); +function isBytes(obj) { + return ArrayBuffer.isView(obj) || obj instanceof ArrayBuffer; +} +function isBlob(obj) { + return Boolean(obj.constructor) && + (obj.constructor.name === 'Blob' || obj.constructor.name === 'File') && + typeof obj.stream === 'function'; +} +function isSingle(input) { + return typeof input === 'string' || input instanceof String || isBytes(input) || isBlob(input) || '_readableState' in input; +} +/** + * Get a single or multiple normaliser depending on the input. + */ +function getNormaliser(input) { + if (isSingle(input)) { + return (0, normalise_input_single_1.normaliseInput)(input); + } + else { + return (0, normalise_input_multiple_1.normaliseInput)(input); + } +} +exports.getNormaliser = getNormaliser; diff --git a/dist/cjs/unpack/fs.js b/dist/cjs/unpack/fs.js new file mode 100644 index 0000000..39efa0f --- /dev/null +++ b/dist/cjs/unpack/fs.js @@ -0,0 +1,51 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ?
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.writeFiles = exports.unpackStreamToFs = exports.unpackToFs = void 0; +const fs_1 = __importDefault(require("fs")); +const it_pipe_1 = __importDefault(require("it-pipe")); +const streaming_iterables_1 = require("streaming-iterables"); +const car_1 = require("@ipld/car"); +const fs_2 = require("../blockstore/fs"); +// @ts-ignore stream-to-it has no types exported +const stream_to_it_1 = __importDefault(require("stream-to-it")); +const index_1 = require("./index"); +// Node only, read a car from fs, write files to fs +async function unpackToFs({ input, roots, output }) { + const carReader = await car_1.CarIndexedReader.fromFile(input); + await writeFiles((0, index_1.unpack)(carReader, roots), output); +} +exports.unpackToFs = unpackToFs; +// Node only, read a stream, write files to fs +async function unpackStreamToFs({ input, roots, output, blockstore: userBlockstore }) { + const blockstore = userBlockstore ? userBlockstore : new fs_2.FsBlockStore(); + await writeFiles((0, index_1.unpackStream)(input, { roots, blockstore }), output); + if (!userBlockstore) { + await blockstore.close(); + } +} +exports.unpackStreamToFs = unpackStreamToFs; +async function writeFiles(source, output) { + for await (const file of source) { + let filePath = file.path; + // output overrides the first part of the path. + if (output) { + const parts = file.path.split('/'); + parts[0] = output; + filePath = parts.join('/'); + } + if (file.type === 'file' || file.type === 'raw') { + await (0, it_pipe_1.default)(file.content, (0, streaming_iterables_1.map)((chunk) => chunk.slice()), // BufferList to Buffer + stream_to_it_1.default.sink(fs_1.default.createWriteStream(filePath))); + } + else if (file.type === 'directory') { + await fs_1.default.promises.mkdir(filePath, { recursive: true }); + } + else { + throw new Error(`Unsupported UnixFS type ${file.type} for ${file.path}`); + } + } +} +exports.writeFiles = writeFiles; diff --git a/dist/cjs/unpack/index.js b/dist/cjs/unpack/index.js new file mode 100644 index 0000000..a6132a9 --- /dev/null +++ b/dist/cjs/unpack/index.js @@ -0,0 +1,47 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unpackStream = exports.unpack = void 0; +const browser_readablestream_to_it_1 = __importDefault(require("browser-readablestream-to-it")); +const iterator_1 = require("@ipld/car/iterator"); +const ipfs_unixfs_exporter_1 = require("ipfs-unixfs-exporter"); +const verifying_get_only_blockstore_1 = require("./utils/verifying-get-only-blockstore"); +const memory_1 = require("../blockstore/memory"); +// Export unixfs entries from car file +async function* unpack(carReader, roots) { + const verifyingBlockService = verifying_get_only_blockstore_1.VerifyingGetOnlyBlockStore.fromCarReader(carReader); + if (!roots || roots.length === 0) { + roots = await carReader.getRoots(); + } + for (const root of roots) { + yield* (0, ipfs_unixfs_exporter_1.recursive)(root, verifyingBlockService, { /* options */}); + } +} +exports.unpack = unpack; +async function* unpackStream(readable, { roots, blockstore: userBlockstore } = {}) { + const carIterator = await iterator_1.CarBlockIterator.fromIterable(asAsyncIterable(readable)); + const blockstore = userBlockstore || new memory_1.MemoryBlockStore(); + for await (const block of carIterator) { + await blockstore.put(block.cid, block.bytes); + } + const verifyingBlockStore = verifying_get_only_blockstore_1.VerifyingGetOnlyBlockStore.fromBlockstore(blockstore); + if (!roots || roots.length === 0) { + roots = await carIterator.getRoots(); + } + for (const root of roots) { + yield* (0, ipfs_unixfs_exporter_1.recursive)(root, verifyingBlockStore); + } +} +exports.unpackStream = unpackStream; +/** + * Upgrade a ReadableStream to an AsyncIterable if it isn't already + * + * ReadableStream (e.g res.body) is asyncIterable in node, but not in chrome, yet. + * see: https://bugs.chromium.org/p/chromium/issues/detail?id=929585 + */ +function asAsyncIterable(readable) { + // @ts-ignore how to convince tsc that we are checking the type here? + return Symbol.asyncIterator in readable ? readable : (0, browser_readablestream_to_it_1.default)(readable); +} diff --git a/dist/cjs/unpack/utils/verifying-get-only-blockstore.js b/dist/cjs/unpack/utils/verifying-get-only-blockstore.js new file mode 100644 index 0000000..0fc0b4c --- /dev/null +++ b/dist/cjs/unpack/utils/verifying-get-only-blockstore.js @@ -0,0 +1,39 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VerifyingGetOnlyBlockStore = void 0; +const equals_1 = require("uint8arrays/equals"); +const sha2_1 = require("multiformats/hashes/sha2"); +const blockstore_core_1 = require("blockstore-core"); +class VerifyingGetOnlyBlockStore extends blockstore_core_1.BaseBlockstore { + constructor(blockstore) { + super(); + this.store = blockstore; + } + async get(cid) { + const res = await this.store.get(cid); + if (!res) { + throw new Error(`Incomplete CAR. Block missing for CID ${cid}`); + } + if (!(await isValid({ cid, bytes: res }))) { + throw new Error(`Invalid CAR. Hash of block data does not match CID ${cid}`); + } + return res; + } + static fromBlockstore(b) { + return new VerifyingGetOnlyBlockStore(b); + } + static fromCarReader(cr) { + return new VerifyingGetOnlyBlockStore({ + // Return bytes in the same fashion as a Blockstore implementation + get: async (cid) => { + const block = await cr.get(cid); + return block === null || block === void 0 ?
void 0 : block.bytes; + } + }); + } +} +exports.VerifyingGetOnlyBlockStore = VerifyingGetOnlyBlockStore; +async function isValid({ cid, bytes }) { + const hash = await sha2_1.sha256.digest(bytes); + return (0, equals_1.equals)(hash.digest, cid.multihash.digest); +} diff --git a/dist/esm/blockstore/fs.js b/dist/esm/blockstore/fs.js new file mode 100644 index 0000000..cdb0540 --- /dev/null +++ b/dist/esm/blockstore/fs.js @@ -0,0 +1,69 @@ +import fs from 'fs'; +import os from 'os'; +import { CID } from 'multiformats'; +import { BaseBlockstore } from 'blockstore-core'; +export class FsBlockStore extends BaseBlockstore { + constructor() { + super(); + this.path = `${os.tmpdir()}/${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}`; + this._opened = false; + } + async _open() { + if (this._opening) { + await this._opening; + } + else { + this._opening = fs.promises.mkdir(this.path); + await this._opening; + this._opened = true; + } + } + async put(cid, bytes) { + if (!this._opened) { + await this._open(); + } + const cidStr = cid.toString(); + const location = `${this.path}/${cidStr}`; + await fs.promises.writeFile(location, bytes); + } + async get(cid) { + if (!this._opened) { + await this._open(); + } + const cidStr = cid.toString(); + const location = `${this.path}/${cidStr}`; + const bytes = await fs.promises.readFile(location); + return bytes; + } + async has(cid) { + if (!this._opened) { + await this._open(); + } + const cidStr = cid.toString(); + const location = `${this.path}/${cidStr}`; + try { + await fs.promises.access(location); + return true; + } + catch (err) { + return false; + } + } + async *blocks() { + if (!this._opened) { + await this._open(); + } + const cids = await fs.promises.readdir(this.path); + for (const cidStr of cids) { + const location = `${this.path}/${cidStr}`; + const bytes = await fs.promises.readFile(location); + yield { cid: CID.parse(cidStr), bytes }; + } + } + async close() { + if (this._opened) { + await fs.promises.rm(this.path, { recursive: true }); + } + this._opened = false; + } +} diff --git a/dist/esm/blockstore/idb.js b/dist/esm/blockstore/idb.js new file mode 100644 index 0000000..89f34cf --- /dev/null +++ b/dist/esm/blockstore/idb.js @@ -0,0 +1,42 @@ +import * as idb from 'idb-keyval'; +import { CID } from 'multiformats'; +import { BaseBlockstore } from 'blockstore-core'; +/** + * Save blocks to IndexedDB in the browser via idb-keyval + * Creates a probably unique indexed db per instance to ensure that the + * blocks iteration method only returns blocks from this invocation, + * and so that the caller can destroy it without affecting others.
+ */ +export class IdbBlockStore extends BaseBlockstore { + constructor() { + super(); + const dbName = `IdbBlockStore-${Date.now()}-${Math.random()}`; + this.store = idb.createStore(dbName, `IdbBlockStore`); + } + async *blocks() { + const keys = await idb.keys(this.store); + for await (const key of keys) { + yield { + cid: CID.parse(key.toString()), + bytes: await idb.get(key, this.store) + }; + } + } + async put(cid, bytes) { + await idb.set(cid.toString(), bytes, this.store); + } + async get(cid) { + const bytes = await idb.get(cid.toString(), this.store); + if (!bytes) { + throw new Error(`block with cid ${cid.toString()} not found`); + } + return bytes; + } + async has(cid) { + const bytes = await idb.get(cid.toString(), this.store); + return Boolean(bytes); + } + async close() { + return idb.clear(this.store); + } +} diff --git a/dist/esm/blockstore/index.js b/dist/esm/blockstore/index.js new file mode 100644 index 0000000..509db18 --- /dev/null +++ b/dist/esm/blockstore/index.js @@ -0,0 +1 @@ +export {}; diff --git a/dist/esm/blockstore/memory.js b/dist/esm/blockstore/memory.js new file mode 100644 index 0000000..aef5699 --- /dev/null +++ b/dist/esm/blockstore/memory.js @@ -0,0 +1,31 @@ +import { CID } from 'multiformats'; +import { BaseBlockstore } from 'blockstore-core'; +export class MemoryBlockStore extends BaseBlockstore { + constructor() { + super(); + this.store = new Map(); + } + async *blocks() { + for (const [cidStr, bytes] of this.store.entries()) { + yield { cid: CID.parse(cidStr), bytes }; + } + } + put(cid, bytes) { + this.store.set(cid.toString(), bytes); + return Promise.resolve(); + } + get(cid) { + const bytes = this.store.get(cid.toString()); + if (!bytes) { + throw new Error(`block with cid ${cid.toString()} not found`); + } + return Promise.resolve(bytes); + } + has(cid) { + return Promise.resolve(this.store.has(cid.toString())); + } + close() { + this.store.clear(); + return Promise.resolve(); + } +} diff --git a/dist/esm/cli/cli.js b/dist/esm/cli/cli.js new file mode 100644 index 0000000..dbd1783 --- /dev/null +++ b/dist/esm/cli/cli.js @@ -0,0 +1,126 @@ +#!/usr/bin/env node +import meow from 'meow'; +import { CID } from 'multiformats'; +import { packToFs } from "../pack/fs.js"; +import { unpackToFs, unpackStreamToFs } from "../unpack/fs.js"; +import { listFilesInCar, listCidsInCar, listRootsInCar } from "./lib.js"; +const options = { + flags: { + output: { + type: 'string', + alias: 'o', + }, + pack: { + type: 'string', + alias: 'p' + }, + unpack: { + type: 'string', + alias: 'u', + }, + root: { + type: 'string', + isMultiple: true + }, + list: { + type: 'string', + alias: 'l', + }, + listCids: { + type: 'string' + }, + listRoots: { + type: 'string' + }, + wrapWithDirectory: { + type: 'boolean', + alias: 'w', + default: true + } + } +}; +const cli = meow(` + Content Addressable archives (CAR) store IPLD block data as a sequence of bytes; + typically in a file with a .car extension. The CAR format is a serialized + representation of any IPLD DAG (graph) as the concatenation of its blocks, plus + a header that describes the graphs in the file (via root CIDs). + + See: https://github.com/ipld/specs/blob/master/block-layer/content-addressable-archives.md + + Packing files into a .car + + # write a content addressed archive to the current working dir. + $ ipfs-car --pack path/to/file/or/dir + + # specify the car file name.
+ $ ipfs-car --pack path/to/files --output path/to/write/a.car + + # pack files without wrapping with top-level directory + $ ipfs-car --wrapWithDirectory false --pack path/to/files --output path/to/write/a.car + + Unpacking files from a .car + + # write 1 or more files to the current working dir. + $ ipfs-car --unpack path/to/my.car + + # unpack files to a specific path. + $ ipfs-car --unpack path/to/my.car --output /path/to/unpack/files/to + + # unpack specific roots + $ ipfs-car --unpack path/to/my.car --root <cid1> [--root <cid2>] + + # unpack files from a .car on stdin. + $ cat path/to/my.car | ipfs-car --unpack + + Listing the contents of a .car + + # list the cids for all the blocks. + $ ipfs-car --list-cids path/to/my.car + + # list the cid roots. + $ ipfs-car --list-roots path/to/my.car + + # list the files. + $ ipfs-car --list path/to/my.car + + TL;DR + --pack <path> --output <my.car> + --unpack <my.car> --output <path> + +`, options); +async function handleInput({ flags }) { + if (flags.pack) { + const { root, filename } = await packToFs({ input: flags.pack, output: flags.output, wrapWithDirectory: flags.wrapWithDirectory }); + // tslint:disable-next-line: no-console + console.log(`root CID: ${root.toString()}`); + // tslint:disable-next-line: no-console + console.log(` output: ${filename}`); + } + else if (flags.unpack !== undefined) { + const roots = (flags.root || []).map(r => CID.parse(r)); + if (flags.unpack === '') { + return unpackStreamToFs({ input: process.stdin, roots, output: flags.output }); + } + return unpackToFs({ input: flags.unpack, roots, output: flags.output }); + } + else if (flags.list) { + return listFilesInCar({ input: flags.list }); + } + else if (flags.listRoots) { + return listRootsInCar({ input: flags.listRoots }); + } + else if (flags.listCids) { + return listCidsInCar({ input: flags.listCids }); + } + else if (!process.stdin.isTTY) { + // maybe stream?
+ // tslint:disable-next-line: no-console + console.log('Reading .car from stdin'); + return unpackStreamToFs({ input: process.stdin, output: flags.output }); + } + else { + cli.showHelp(); + throw new Error('--pack or --unpack flag required'); + } +} +handleInput(cli); diff --git a/dist/esm/cli/lib.js b/dist/esm/cli/lib.js new file mode 100644 index 0000000..c5fd6bf --- /dev/null +++ b/dist/esm/cli/lib.js @@ -0,0 +1,25 @@ +import fs from 'fs'; +import { CarIndexedReader, CarCIDIterator } from '@ipld/car'; +import { unpack } from "../unpack/index.js"; +export async function listFilesInCar({ input }) { + const carReader = await CarIndexedReader.fromFile(input); + for await (const file of unpack(carReader)) { + // tslint:disable-next-line: no-console + console.log(file.path); + } +} +export async function listCidsInCar({ input }) { + const carIterator = await CarCIDIterator.fromIterable(fs.createReadStream(input)); + for await (const cid of carIterator) { + // tslint:disable-next-line: no-console + console.log(cid.toString()); + } +} +export async function listRootsInCar({ input }) { + const carIterator = await CarCIDIterator.fromIterable(fs.createReadStream(input)); + const roots = await carIterator.getRoots(); + for (const root of roots) { + // tslint:disable-next-line: no-console + console.log(root.toString()); + } +} diff --git a/dist/esm/pack/blob.js b/dist/esm/pack/blob.js new file mode 100644 index 0000000..850e43e --- /dev/null +++ b/dist/esm/pack/blob.js @@ -0,0 +1,25 @@ +import { Blob } from "@web-std/blob"; +import all from 'it-all'; +import { MemoryBlockStore } from "../blockstore/memory.js"; +import { pack } from "./index.js"; +export async function packToBlob({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }) { + const blockstore = userBlockstore ? userBlockstore : new MemoryBlockStore(); + const { root, out } = await pack({ + input, + blockstore, + hasher, + maxChunkSize, + maxChildrenPerNode, + wrapWithDirectory, + rawLeaves + }); + const carParts = await all(out); + if (!userBlockstore) { + await blockstore.close(); + } + const car = new Blob(carParts, { + // https://www.iana.org/assignments/media-types/application/vnd.ipld.car + type: 'application/vnd.ipld.car', + }); + return { root, car }; +} diff --git a/dist/esm/pack/constants.js b/dist/esm/pack/constants.js new file mode 100644 index 0000000..35e1a72 --- /dev/null +++ b/dist/esm/pack/constants.js @@ -0,0 +1,10 @@ +import { sha256 } from 'multiformats/hashes/sha2'; +export const unixfsImporterOptionsDefault = { + cidVersion: 1, + chunker: 'fixed', + maxChunkSize: 262144, + hasher: sha256, + rawLeaves: true, + wrapWithDirectory: true, + maxChildrenPerNode: 174 +}; diff --git a/dist/esm/pack/fs.js b/dist/esm/pack/fs.js new file mode 100644 index 0000000..daa767b --- /dev/null +++ b/dist/esm/pack/fs.js @@ -0,0 +1,32 @@ +import fs from 'fs'; +import os from 'os'; +import path from 'path'; +import moveFile from 'move-file'; +import { packToStream } from "./stream.js"; +import { FsBlockStore } from "../blockstore/fs.js"; +export async function packToFs({ input, output, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }) { + const blockstore = userBlockstore ?
userBlockstore : new FsBlockStore(); + const location = output || `${os.tmpdir()}/${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}`; + const writable = fs.createWriteStream(location); + const { root } = await packToStream({ + input, + writable, + blockstore, + hasher, + maxChunkSize, + maxChildrenPerNode, + wrapWithDirectory, + rawLeaves + }); + if (!userBlockstore) { + await blockstore.close(); + } + // Move to work dir + if (!output) { + const basename = typeof input === 'string' ? path.parse(path.basename(input)).name : root.toString(); + const filename = `${basename}.car`; + await moveFile(location, `${process.cwd()}/${filename}`); + return { root, filename }; + } + return { root, filename: output }; +} diff --git a/dist/esm/pack/index.js b/dist/esm/pack/index.js new file mode 100644 index 0000000..e8d3de2 --- /dev/null +++ b/dist/esm/pack/index.js @@ -0,0 +1,60 @@ +import last from 'it-last'; +import pipe from 'it-pipe'; +import { CarWriter } from '@ipld/car'; +import { importer } from 'ipfs-unixfs-importer'; +import { getNormaliser } from "./utils/normalise-input.js"; +import { MemoryBlockStore } from "../blockstore/memory.js"; +import { unixfsImporterOptionsDefault } from "./constants.js"; +export async function pack({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }) { + if (!input || (Array.isArray(input) && !input.length)) { + throw new Error('missing input file(s)'); + } + const blockstore = userBlockstore ? userBlockstore : new MemoryBlockStore(); + // Consume the source + const rootEntry = await last(pipe(getNormaliser(input), (source) => importer(source, blockstore, { + ...unixfsImporterOptionsDefault, + hasher: hasher || unixfsImporterOptionsDefault.hasher, + maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize, + maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode, + wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory, + rawLeaves: rawLeaves == null ? 
unixfsImporterOptionsDefault.rawLeaves : rawLeaves, + cidVersion: cidVersion == null ? unixfsImporterOptionsDefault.cidVersion : cidVersion + }))); + if (!rootEntry || !rootEntry.cid) { + throw new Error('given input could not be parsed correctly'); + } + const root = rootEntry.cid; + const { writer, out: carOut } = await CarWriter.create([root]); + const carOutIter = carOut[Symbol.asyncIterator](); + let writingPromise; + const writeAll = async () => { + for await (const block of blockstore.blocks()) { + // `await` will block until all bytes in `carOut` are consumed by the user + // so we have backpressure here + await writer.put(block); + } + await writer.close(); + if (!userBlockstore) { + await blockstore.close(); + } + }; + const out = { + [Symbol.asyncIterator]() { + if (writingPromise != null) { + throw new Error('Multiple iterators not supported'); + } + // don't start writing until the user starts consuming the iterator + writingPromise = writeAll(); + return { + async next() { + const result = await carOutIter.next(); + if (result.done) { + await writingPromise; // any errors will propagate from here + } + return result; + } + }; + } + }; + return { root, out }; +} diff --git a/dist/esm/pack/stream.js b/dist/esm/pack/stream.js new file mode 100644 index 0000000..e861558 --- /dev/null +++ b/dist/esm/pack/stream.js @@ -0,0 +1,65 @@ +import fs from 'fs'; +import path from 'path'; +import { Readable } from 'stream'; +import last from 'it-last'; +import pipe from 'it-pipe'; +import { CarWriter } from '@ipld/car'; +import { importer } from 'ipfs-unixfs-importer'; +import { normaliseInput } from 'ipfs-core-utils/files/normalise-input-multiple'; +import globSource from 'ipfs-utils/src/files/glob-source.js'; +import { MemoryBlockStore } from "../blockstore/memory.js"; +import { unixfsImporterOptionsDefault } from "./constants.js"; +// Node version of toCar with Node Stream Writable +export async function packToStream({ input, writable, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }) { + if (!input || (Array.isArray(input) && !input.length)) { + throw new Error('given input could not be parsed correctly'); + } + input = typeof input === 'string' ? [input] : input; + const blockstore = userBlockstore ? userBlockstore : new MemoryBlockStore(); + // Consume the source + const rootEntry = await last(pipe(legacyGlobSource(input), source => normaliseInput(source), (source) => importer(source, blockstore, { + ...unixfsImporterOptionsDefault, + hasher: hasher || unixfsImporterOptionsDefault.hasher, + maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize, + maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode, + wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory, + rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves, + cidVersion: cidVersion == null ? unixfsImporterOptionsDefault.cidVersion : cidVersion + }))); + if (!rootEntry || !rootEntry.cid) { + throw new Error('given input could not be parsed correctly'); + } + const root = rootEntry.cid; + const { writer, out } = await CarWriter.create([root]); + Readable.from(out).pipe(writable); + for await (const block of blockstore.blocks()) { + await writer.put(block); + } + await writer.close(); + if (!userBlockstore) { + await blockstore.close(); + } + return { root }; +} +/** + * This function replicates the old behaviour of globSource to not introduce a + * breaking change.
+ * + * TODO: figure out what the breaking change will be. + */ +async function* legacyGlobSource(input) { + for await (const p of input) { + const resolvedPath = path.resolve(p); + const stat = await fs.promises.stat(resolvedPath); + const fileName = path.basename(resolvedPath); + if (stat.isDirectory()) { + yield { path: fileName }; + for await (const candidate of globSource(resolvedPath, '**/*')) { + yield { ...candidate, path: path.join(fileName, candidate.path) }; + } + } + else { + yield { path: fileName, content: fs.createReadStream(resolvedPath) }; + } + } +} diff --git a/dist/esm/pack/utils/normalise-input.js b/dist/esm/pack/utils/normalise-input.js new file mode 100644 index 0000000..0970645 --- /dev/null +++ b/dist/esm/pack/utils/normalise-input.js @@ -0,0 +1,24 @@ +import { normaliseInput as normaliseInputSingle } from 'ipfs-core-utils/files/normalise-input-single'; +import { normaliseInput as normaliseInputMultiple } from 'ipfs-core-utils/files/normalise-input-multiple'; +function isBytes(obj) { + return ArrayBuffer.isView(obj) || obj instanceof ArrayBuffer; +} +function isBlob(obj) { + return Boolean(obj.constructor) && + (obj.constructor.name === 'Blob' || obj.constructor.name === 'File') && + typeof obj.stream === 'function'; +} +function isSingle(input) { + return typeof input === 'string' || input instanceof String || isBytes(input) || isBlob(input) || '_readableState' in input; +} +/** + * Get a single or multiple normaliser depending on the input. + */ +export function getNormaliser(input) { + if (isSingle(input)) { + return normaliseInputSingle(input); + } + else { + return normaliseInputMultiple(input); + } +} diff --git a/dist/esm/package.json b/dist/esm/package.json new file mode 100644 index 0000000..4c32b01 --- /dev/null +++ b/dist/esm/package.json @@ -0,0 +1 @@ +{ "type": "module" } diff --git a/dist/esm/unpack/fs.js b/dist/esm/unpack/fs.js new file mode 100644 index 0000000..5c2ece1 --- /dev/null +++ b/dist/esm/unpack/fs.js @@ -0,0 +1,42 @@ +import fs from 'fs'; +import pipe from 'it-pipe'; +import { map } from 'streaming-iterables'; +import { CarIndexedReader } from '@ipld/car'; +import { FsBlockStore } from "../blockstore/fs.js"; +// @ts-ignore stream-to-it has no types exported +import toIterable from 'stream-to-it'; +import { unpack, unpackStream } from "./index.js"; +// Node only, read a car from fs, write files to fs +export async function unpackToFs({ input, roots, output }) { + const carReader = await CarIndexedReader.fromFile(input); + await writeFiles(unpack(carReader, roots), output); +} +// Node only, read a stream, write files to fs +export async function unpackStreamToFs({ input, roots, output, blockstore: userBlockstore }) { + const blockstore = userBlockstore ? userBlockstore : new FsBlockStore(); + await writeFiles(unpackStream(input, { roots, blockstore }), output); + if (!userBlockstore) { + await blockstore.close(); + } +} +export async function writeFiles(source, output) { + for await (const file of source) { + let filePath = file.path; + // output overrides the first part of the path.
+ if (output) { + const parts = file.path.split('/'); + parts[0] = output; + filePath = parts.join('/'); + } + if (file.type === 'file' || file.type === 'raw') { + await pipe(file.content, map((chunk) => chunk.slice()), // BufferList to Buffer + toIterable.sink(fs.createWriteStream(filePath))); + } + else if (file.type === 'directory') { + await fs.promises.mkdir(filePath, { recursive: true }); + } + else { + throw new Error(`Unsupported UnixFS type ${file.type} for ${file.path}`); + } + } +} diff --git a/dist/esm/unpack/index.js b/dist/esm/unpack/index.js new file mode 100644 index 0000000..184721c --- /dev/null +++ b/dist/esm/unpack/index.js @@ -0,0 +1,39 @@ +import toIterable from 'browser-readablestream-to-it'; +import { CarBlockIterator } from '@ipld/car/iterator'; +import { recursive as unixFsExporter } from 'ipfs-unixfs-exporter'; +import { VerifyingGetOnlyBlockStore } from "./utils/verifying-get-only-blockstore.js"; +import { MemoryBlockStore } from "../blockstore/memory.js"; +// Export unixfs entries from car file +export async function* unpack(carReader, roots) { + const verifyingBlockService = VerifyingGetOnlyBlockStore.fromCarReader(carReader); + if (!roots || roots.length === 0) { + roots = await carReader.getRoots(); + } + for (const root of roots) { + yield* unixFsExporter(root, verifyingBlockService, { /* options */}); + } +} +export async function* unpackStream(readable, { roots, blockstore: userBlockstore } = {}) { + const carIterator = await CarBlockIterator.fromIterable(asAsyncIterable(readable)); + const blockstore = userBlockstore || new MemoryBlockStore(); + for await (const block of carIterator) { + await blockstore.put(block.cid, block.bytes); + } + const verifyingBlockStore = VerifyingGetOnlyBlockStore.fromBlockstore(blockstore); + if (!roots || roots.length === 0) { + roots = await carIterator.getRoots(); + } + for (const root of roots) { + yield* unixFsExporter(root, verifyingBlockStore); + } +} +/** + * Upgrade a ReadableStream to an AsyncIterable if it isn't already + * + * ReadableStream (e.g res.body) is asyncIterable in node, but not in chrome, yet. + * see: https://bugs.chromium.org/p/chromium/issues/detail?id=929585 + */ +function asAsyncIterable(readable) { + // @ts-ignore how to convince tsc that we are checking the type here? + return Symbol.asyncIterator in readable ? readable : toIterable(readable); +} diff --git a/dist/esm/unpack/utils/verifying-get-only-blockstore.js b/dist/esm/unpack/utils/verifying-get-only-blockstore.js new file mode 100644 index 0000000..4cf7856 --- /dev/null +++ b/dist/esm/unpack/utils/verifying-get-only-blockstore.js @@ -0,0 +1,35 @@ +import { equals } from 'uint8arrays/equals'; +import { sha256 } from 'multiformats/hashes/sha2'; +import { BaseBlockstore } from 'blockstore-core'; +export class VerifyingGetOnlyBlockStore extends BaseBlockstore { + constructor(blockstore) { + super(); + this.store = blockstore; + } + async get(cid) { + const res = await this.store.get(cid); + if (!res) { + throw new Error(`Incomplete CAR. Block missing for CID ${cid}`); + } + if (!isValid({ cid, bytes: res })) { + throw new Error(`Invalid CAR. Hash of block data does not match CID ${cid}`); + } + return res; + } + static fromBlockstore(b) { + return new VerifyingGetOnlyBlockStore(b); + } + static fromCarReader(cr) { + return new VerifyingGetOnlyBlockStore({ + // Return bytes in the same fashion as a Blockstore implementation + get: async (cid) => { + const block = await cr.get(cid); + return block === null || block === void 0 ? 
void 0 : block.bytes; + } + }); + } +} +async function isValid({ cid, bytes }) { + const hash = await sha256.digest(bytes); + return equals(hash.digest, cid.multihash.digest); +} diff --git a/dist/types/blockstore/fs.d.ts b/dist/types/blockstore/fs.d.ts new file mode 100644 index 0000000..aad6299 --- /dev/null +++ b/dist/types/blockstore/fs.d.ts @@ -0,0 +1,19 @@ +/// <reference types="node" /> +import { CID } from 'multiformats'; +import { BaseBlockstore } from 'blockstore-core'; +import { Blockstore } from './index'; +export declare class FsBlockStore extends BaseBlockstore implements Blockstore { + path: string; + _opened: boolean; + _opening?: Promise<void>; + constructor(); + _open(): Promise<void>; + put(cid: CID, bytes: Uint8Array): Promise<void>; + get(cid: CID): Promise<Uint8Array>; + has(cid: CID): Promise<boolean>; + blocks(): AsyncGenerator<{ + cid: CID; + bytes: Buffer; + }, void, unknown>; + close(): Promise<void>; +} diff --git a/dist/types/blockstore/idb.d.ts b/dist/types/blockstore/idb.d.ts new file mode 100644 index 0000000..94f35ea --- /dev/null +++ b/dist/types/blockstore/idb.d.ts @@ -0,0 +1,21 @@ +import { CID } from 'multiformats'; +import { BaseBlockstore } from 'blockstore-core'; +import { Blockstore } from './index'; +/** + * Save blocks to IndexedDB in the browser via idb-keyval + * Creates a probably unique indexed db per instance to ensure that the + * blocks iteration method only returns blocks from this invocation, + * and so that the caller can destroy it without affecting others. + */ +export declare class IdbBlockStore extends BaseBlockstore implements Blockstore { + private store; + constructor(); + blocks(): AsyncGenerator<{ + cid: CID; + bytes: any; + }, void, unknown>; + put(cid: CID, bytes: Uint8Array): Promise<void>; + get(cid: CID): Promise<Uint8Array>; + has(cid: CID): Promise<boolean>; + close(): Promise<void>; +} diff --git a/dist/types/blockstore/index.d.ts b/dist/types/blockstore/index.d.ts new file mode 100644 index 0000000..b4b96d2 --- /dev/null +++ b/dist/types/blockstore/index.d.ts @@ -0,0 +1,5 @@ +import { Block } from '@ipld/car/api'; +import type { Blockstore as IpfsBlockstore } from 'interface-blockstore'; +export interface Blockstore extends IpfsBlockstore { + blocks(): AsyncGenerator<Block, void, unknown>; +} diff --git a/dist/types/blockstore/memory.d.ts b/dist/types/blockstore/memory.d.ts new file mode 100644 index 0000000..104f17e --- /dev/null +++ b/dist/types/blockstore/memory.d.ts @@ -0,0 +1,15 @@ +import { CID } from 'multiformats'; +import { BaseBlockstore } from 'blockstore-core'; +import { Blockstore } from './index'; +export declare class MemoryBlockStore extends BaseBlockstore implements Blockstore { + store: Map<string, Uint8Array>; + constructor(); + blocks(): AsyncGenerator<{ + cid: CID; + bytes: Uint8Array; + }, void, unknown>; + put(cid: CID, bytes: Uint8Array): Promise<void>; + get(cid: CID): Promise<Uint8Array>; + has(cid: CID): Promise<boolean>; + close(): Promise<void>; +} diff --git a/dist/types/cli/cli.d.ts b/dist/types/cli/cli.d.ts new file mode 100644 index 0000000..d1ebc9c --- /dev/null +++ b/dist/types/cli/cli.d.ts @@ -0,0 +1,2 @@ +#!/usr/bin/env node +export {}; diff --git a/dist/types/cli/lib.d.ts b/dist/types/cli/lib.d.ts new file mode 100644 index 0000000..f538aba --- /dev/null +++ b/dist/types/cli/lib.d.ts @@ -0,0 +1,9 @@ +export declare function listFilesInCar({ input }: { + input: string; +}): Promise<void>; +export declare function listCidsInCar({ input }: { + input: string; +}): Promise<void>; +export declare function listRootsInCar({ input }: { + input: string; +}): Promise<void>; diff --git a/dist/types/pack/blob.d.ts b/dist/types/pack/blob.d.ts new file mode 100644 index 0000000..a296b45 --- 
/dev/null +++ b/dist/types/pack/blob.d.ts @@ -0,0 +1,7 @@ +import type { ImportCandidateStream } from 'ipfs-core-types/src/utils'; +export type { ImportCandidateStream }; +import type { PackProperties } from './index'; +export declare function packToBlob({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }: PackProperties): Promise<{ + root: import("multiformats").CID; + car: Blob; +}>; diff --git a/dist/types/pack/constants.d.ts b/dist/types/pack/constants.d.ts new file mode 100644 index 0000000..7181c9e --- /dev/null +++ b/dist/types/pack/constants.d.ts @@ -0,0 +1,2 @@ +import type { UserImporterOptions } from 'ipfs-unixfs-importer/types/src/types'; +export declare const unixfsImporterOptionsDefault: UserImporterOptions; diff --git a/dist/types/pack/fs.d.ts b/dist/types/pack/fs.d.ts new file mode 100644 index 0000000..9ba1319 --- /dev/null +++ b/dist/types/pack/fs.d.ts @@ -0,0 +1,9 @@ +import type { PackProperties } from './index'; +export interface PackToFsProperties extends PackProperties { + input: string | Iterable<string> | AsyncIterable<string>; + output?: string; +} +export declare function packToFs({ input, output, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }: PackToFsProperties): Promise<{ + root: import("multiformats").CID; + filename: string; +}>; diff --git a/dist/types/pack/index.d.ts b/dist/types/pack/index.d.ts new file mode 100644 index 0000000..f3126e5 --- /dev/null +++ b/dist/types/pack/index.d.ts @@ -0,0 +1,22 @@ +import type { ImportCandidateStream, ImportCandidate } from 'ipfs-core-types/src/utils'; +import type { MultihashHasher } from 'multiformats/hashes/interface'; +export type { ImportCandidateStream }; +import { Blockstore } from '../blockstore/index'; +import { CIDVersion } from "multiformats/types/src/cid"; +export interface PackProperties { + input: ImportCandidateStream | ImportCandidate; + blockstore?: Blockstore; + maxChunkSize?: number; + maxChildrenPerNode?: number; + wrapWithDirectory?: boolean; + hasher?: MultihashHasher; + /** + * Use raw codec for leaf nodes. Default: true. + */ + rawLeaves?: boolean; + cidVersion?: CIDVersion | undefined; +} +export declare function pack({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }: PackProperties): Promise<{ + root: import("multiformats/types/src/cid").CID; + out: AsyncIterable<Uint8Array>; +}>; diff --git a/dist/types/pack/stream.d.ts b/dist/types/pack/stream.d.ts new file mode 100644 index 0000000..d7b3c98 --- /dev/null +++ b/dist/types/pack/stream.d.ts @@ -0,0 +1,10 @@ +/// <reference types="node" /> +import { Writable } from 'stream'; +import type { PackProperties } from './index'; +export interface PackToStreamProperties extends PackProperties { + input: string | Iterable<string> | AsyncIterable<string>; + writable: Writable; +} +export declare function packToStream({ input, writable, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }: PackToStreamProperties): Promise<{ + root: import("multiformats").CID; +}>; diff --git a/dist/types/pack/utils/normalise-input.d.ts b/dist/types/pack/utils/normalise-input.d.ts new file mode 100644 index 0000000..4cc83f0 --- /dev/null +++ b/dist/types/pack/utils/normalise-input.d.ts @@ -0,0 +1,5 @@ +import type { ImportCandidateStream, ImportCandidate } from 'ipfs-core-types/src/utils'; +/** + * Get a single or multiple normaliser depending on the input. 
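+ * For example, a string, Uint8Array, ArrayBuffer, Blob/File, or Node.js readable stream gets the single-item normaliser; any other (async) iterable of import candidates gets the multiple normaliser.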
+ */ +export declare function getNormaliser(input: ImportCandidateStream | ImportCandidate): AsyncGenerator; diff --git a/dist/types/unpack/fs.d.ts b/dist/types/unpack/fs.d.ts new file mode 100644 index 0000000..29f9a13 --- /dev/null +++ b/dist/types/unpack/fs.d.ts @@ -0,0 +1,15 @@ +import { CID } from 'multiformats'; +import { UnixFSEntry } from 'ipfs-unixfs-exporter'; +import { Blockstore } from '../blockstore/index'; +export declare function unpackToFs({ input, roots, output }: { + input: string; + roots?: CID[]; + output?: string; +}): Promise<void>; +export declare function unpackStreamToFs({ input, roots, output, blockstore: userBlockstore }: { + input: AsyncIterable<Uint8Array>; + roots?: CID[]; + output?: string; + blockstore?: Blockstore; +}): Promise<void>; +export declare function writeFiles(source: AsyncIterable<UnixFSEntry>, output?: string): Promise<void>; diff --git a/dist/types/unpack/index.d.ts b/dist/types/unpack/index.d.ts new file mode 100644 index 0000000..1e2fc2c --- /dev/null +++ b/dist/types/unpack/index.d.ts @@ -0,0 +1,10 @@ +import { CarReader } from '@ipld/car/api'; +import { CID } from 'multiformats'; +import type { UnixFSEntry } from 'ipfs-unixfs-exporter'; +export type { UnixFSEntry }; +import { Blockstore } from '../blockstore/index'; +export declare function unpack(carReader: CarReader, roots?: CID[]): AsyncIterable<UnixFSEntry>; +export declare function unpackStream(readable: ReadableStream | AsyncIterable<Uint8Array>, { roots, blockstore: userBlockstore }?: { + roots?: CID[]; + blockstore?: Blockstore; +}): AsyncIterable<UnixFSEntry>; diff --git a/dist/types/unpack/utils/verifying-get-only-blockstore.d.ts b/dist/types/unpack/utils/verifying-get-only-blockstore.d.ts new file mode 100644 index 0000000..05b0f5e --- /dev/null +++ b/dist/types/unpack/utils/verifying-get-only-blockstore.d.ts @@ -0,0 +1,15 @@ +import { CID } from 'multiformats'; +import { CarReader } from '@ipld/car/api'; +import { BaseBlockstore } from 'blockstore-core'; +import { Blockstore } from '../../blockstore/index'; +declare type verifyingBlockStore = { + get: (cid: CID) => Promise<Uint8Array | undefined>; +}; +export declare class VerifyingGetOnlyBlockStore extends BaseBlockstore { + store: verifyingBlockStore; + constructor(blockstore: verifyingBlockStore); + get(cid: CID): Promise<Uint8Array>; + static fromBlockstore(b: Blockstore): VerifyingGetOnlyBlockStore; + static fromCarReader(cr: CarReader): VerifyingGetOnlyBlockStore; +} +export {}; diff --git a/src/pack/index.ts b/src/pack/index.ts index c696cf2..6d83fc5 100644 --- a/src/pack/index.ts +++ b/src/pack/index.ts @@ -11,6 +11,7 @@ export type { ImportCandidateStream } import { Blockstore } from '../blockstore/index' import { MemoryBlockStore } from '../blockstore/memory' import { unixfsImporterOptionsDefault } from './constants' +import { CIDVersion } from 'multiformats/types/src/cid' export interface PackProperties { input: ImportCandidateStream | ImportCandidate, @@ -23,9 +24,10 @@ export interface PackProperties { * Use raw codec for leaf nodes. Default: true. 
*/ rawLeaves?: boolean + cidVersion?: CIDVersion | undefined } -export async function pack ({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }: PackProperties) { +export async function pack ({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }: PackProperties) { if (!input || (Array.isArray(input) && !input.length)) { throw new Error('missing input file(s)') } @@ -41,7 +43,8 @@ export async function pack ({ input, blockstore: userBlockstore, hasher, maxChun maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize, maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode, wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory, - rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves + rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves, + cidVersion: cidVersion == null ? unixfsImporterOptionsDefault.cidVersion : cidVersion }) )) diff --git a/src/pack/stream.ts b/src/pack/stream.ts index 8306bb1..d025860 100644 --- a/src/pack/stream.ts +++ b/src/pack/stream.ts @@ -21,7 +21,7 @@ export interface PackToStreamProperties extends PackProperties { } // Node version of toCar with Node Stream Writable -export async function packToStream ({ input, writable, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }: PackToStreamProperties) { +export async function packToStream ({ input, writable, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }: PackToStreamProperties) { if (!input || (Array.isArray(input) && !input.length)) { throw new Error('given input could not be parsed correctly') } @@ -39,7 +39,8 @@ export async function packToStream ({ input, writable, blockstore: userBlockstor maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize, maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode, wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory, - rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves + rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves, + cidVersion: cidVersion == null ? unixfsImporterOptionsDefault.cidVersion : cidVersion }) )) From 1cf7db8d6268d83edbcdbed5b41f6b40917dac3a Mon Sep 17 00:00:00 2001 From: jtsmedley <38006759+jtsmedley@users.noreply.github.com> Date: Tue, 2 Aug 2022 12:06:46 -0500 Subject: [PATCH 2/3] Revert "Cid version support dist (#1)" This reverts commit dde1580e61756a8df7ff7c450e44d8b089eda7b8. 
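For context, a minimal usage sketch of the cidVersion option that PATCH 1/3 introduces (and this patch reverts). It is not part of either commit, and it assumes the package's documented 'ipfs-car/pack' and 'ipfs-car/blockstore/memory' entry points:

import { pack } from 'ipfs-car/pack'
import { MemoryBlockStore } from 'ipfs-car/blockstore/memory'

async function demo () {
  const { root, out } = await pack({
    input: new Uint8Array([1, 2, 3]), // any ImportCandidate works here
    blockstore: new MemoryBlockStore(),
    rawLeaves: false, // CIDv0 implies dag-pb leaves, so the raw codec must be off
    cidVersion: 0 // falls back to the importer default (1) when omitted
  })
  for await (const chunk of out) {
    // consume the CAR bytes, e.g. write them to a file or an HTTP request body
  }
  console.log(root.toString()) // a Qm... (base58btc) root for CIDv0
}

Note that 0 is a falsy value, so defaulting cidVersion with || would silently coerce a CIDv0 request back to the default of 1; a null check, mirroring how rawLeaves is defaulted, keeps cidVersion: 0 working.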
--- .gitignore | 1 + dist/cjs/blockstore/fs.js | 76 ---------- dist/cjs/blockstore/idb.js | 65 --------- dist/cjs/blockstore/index.js | 2 - dist/cjs/blockstore/memory.js | 35 ----- dist/cjs/cli/cli.js | 131 ------------------ dist/cjs/cli/lib.js | 34 ----- dist/cjs/pack/blob.js | 32 ----- dist/cjs/pack/constants.js | 13 -- dist/cjs/pack/fs.js | 39 ------ dist/cjs/pack/index.js | 67 --------- dist/cjs/pack/stream.js | 72 ---------- dist/cjs/pack/utils/normalise-input.js | 28 ---- dist/cjs/unpack/fs.js | 51 ------- dist/cjs/unpack/index.js | 47 ------- .../utils/verifying-get-only-blockstore.js | 39 ------ dist/esm/blockstore/fs.js | 69 --------- dist/esm/blockstore/idb.js | 42 ------ dist/esm/blockstore/index.js | 1 - dist/esm/blockstore/memory.js | 31 ----- dist/esm/cli/cli.js | 126 ----------------- dist/esm/cli/lib.js | 25 ---- dist/esm/pack/blob.js | 25 ---- dist/esm/pack/constants.js | 10 -- dist/esm/pack/fs.js | 32 ----- dist/esm/pack/index.js | 60 -------- dist/esm/pack/stream.js | 65 --------- dist/esm/pack/utils/normalise-input.js | 24 ---- dist/esm/package.json | 1 - dist/esm/unpack/fs.js | 42 ------ dist/esm/unpack/index.js | 39 ------ .../utils/verifying-get-only-blockstore.js | 35 ----- dist/types/blockstore/fs.d.ts | 19 --- dist/types/blockstore/idb.d.ts | 21 --- dist/types/blockstore/index.d.ts | 5 - dist/types/blockstore/memory.d.ts | 15 -- dist/types/cli/cli.d.ts | 2 - dist/types/cli/lib.d.ts | 9 -- dist/types/pack/blob.d.ts | 7 - dist/types/pack/constants.d.ts | 2 - dist/types/pack/fs.d.ts | 9 -- dist/types/pack/index.d.ts | 22 --- dist/types/pack/stream.d.ts | 10 -- dist/types/pack/utils/normalise-input.d.ts | 5 - dist/types/unpack/fs.d.ts | 15 -- dist/types/unpack/index.d.ts | 10 -- .../utils/verifying-get-only-blockstore.d.ts | 15 -- src/pack/index.ts | 7 +- src/pack/stream.ts | 5 +- 49 files changed, 5 insertions(+), 1532 deletions(-) delete mode 100644 dist/cjs/blockstore/fs.js delete mode 100644 dist/cjs/blockstore/idb.js delete mode 100644 dist/cjs/blockstore/index.js delete mode 100644 dist/cjs/blockstore/memory.js delete mode 100644 dist/cjs/cli/cli.js delete mode 100644 dist/cjs/cli/lib.js delete mode 100644 dist/cjs/pack/blob.js delete mode 100644 dist/cjs/pack/constants.js delete mode 100644 dist/cjs/pack/fs.js delete mode 100644 dist/cjs/pack/index.js delete mode 100644 dist/cjs/pack/stream.js delete mode 100644 dist/cjs/pack/utils/normalise-input.js delete mode 100644 dist/cjs/unpack/fs.js delete mode 100644 dist/cjs/unpack/index.js delete mode 100644 dist/cjs/unpack/utils/verifying-get-only-blockstore.js delete mode 100644 dist/esm/blockstore/fs.js delete mode 100644 dist/esm/blockstore/idb.js delete mode 100644 dist/esm/blockstore/index.js delete mode 100644 dist/esm/blockstore/memory.js delete mode 100644 dist/esm/cli/cli.js delete mode 100644 dist/esm/cli/lib.js delete mode 100644 dist/esm/pack/blob.js delete mode 100644 dist/esm/pack/constants.js delete mode 100644 dist/esm/pack/fs.js delete mode 100644 dist/esm/pack/index.js delete mode 100644 dist/esm/pack/stream.js delete mode 100644 dist/esm/pack/utils/normalise-input.js delete mode 100644 dist/esm/package.json delete mode 100644 dist/esm/unpack/fs.js delete mode 100644 dist/esm/unpack/index.js delete mode 100644 dist/esm/unpack/utils/verifying-get-only-blockstore.js delete mode 100644 dist/types/blockstore/fs.d.ts delete mode 100644 dist/types/blockstore/idb.d.ts delete mode 100644 dist/types/blockstore/index.d.ts delete mode 100644 dist/types/blockstore/memory.d.ts delete mode 100644 
dist/types/cli/cli.d.ts delete mode 100644 dist/types/cli/lib.d.ts delete mode 100644 dist/types/pack/blob.d.ts delete mode 100644 dist/types/pack/constants.d.ts delete mode 100644 dist/types/pack/fs.d.ts delete mode 100644 dist/types/pack/index.d.ts delete mode 100644 dist/types/pack/stream.d.ts delete mode 100644 dist/types/pack/utils/normalise-input.d.ts delete mode 100644 dist/types/unpack/fs.d.ts delete mode 100644 dist/types/unpack/index.d.ts delete mode 100644 dist/types/unpack/utils/verifying-get-only-blockstore.d.ts diff --git a/.gitignore b/.gitignore index adb2c19..6704566 100644 --- a/.gitignore +++ b/.gitignore @@ -80,6 +80,7 @@ typings/ # Nuxt.js build / generate output .nuxt +dist # Gatsby files .cache/ diff --git a/dist/cjs/blockstore/fs.js b/dist/cjs/blockstore/fs.js deleted file mode 100644 index e6d1d3d..0000000 --- a/dist/cjs/blockstore/fs.js +++ /dev/null @@ -1,76 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.FsBlockStore = void 0; -const fs_1 = __importDefault(require("fs")); -const os_1 = __importDefault(require("os")); -const multiformats_1 = require("multiformats"); -const blockstore_core_1 = require("blockstore-core"); -class FsBlockStore extends blockstore_core_1.BaseBlockstore { - constructor() { - super(); - this.path = `${os_1.default.tmpdir()}/${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}`; - this._opened = false; - } - async _open() { - if (this._opening) { - await this._opening; - } - else { - this._opening = fs_1.default.promises.mkdir(this.path); - await this._opening; - this._opened = true; - } - } - async put(cid, bytes) { - if (!this._opened) { - await this._open(); - } - const cidStr = cid.toString(); - const location = `${this.path}/${cidStr}`; - await fs_1.default.promises.writeFile(location, bytes); - } - async get(cid) { - if (!this._opened) { - await this._open(); - } - const cidStr = cid.toString(); - const location = `${this.path}/${cidStr}`; - const bytes = await fs_1.default.promises.readFile(location); - return bytes; - } - async has(cid) { - if (!this._opened) { - await this._open(); - } - const cidStr = cid.toString(); - const location = `${this.path}/${cidStr}`; - try { - await fs_1.default.promises.access(location); - return true; - } - catch (err) { - return false; - } - } - async *blocks() { - if (!this._opened) { - await this._open(); - } - const cids = await fs_1.default.promises.readdir(this.path); - for (const cidStr of cids) { - const location = `${this.path}/${cidStr}`; - const bytes = await fs_1.default.promises.readFile(location); - yield { cid: multiformats_1.CID.parse(cidStr), bytes }; - } - } - async close() { - if (this._opened) { - await fs_1.default.promises.rm(this.path, { recursive: true }); - } - this._opened = false; - } -} -exports.FsBlockStore = FsBlockStore; diff --git a/dist/cjs/blockstore/idb.js b/dist/cjs/blockstore/idb.js deleted file mode 100644 index 50ea8fb..0000000 --- a/dist/cjs/blockstore/idb.js +++ /dev/null @@ -1,65 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.IdbBlockStore = void 0; -const idb = __importStar(require("idb-keyval")); -const multiformats_1 = require("multiformats"); -const blockstore_core_1 = require("blockstore-core"); -/** - * Save blocks to IndexedDB in the browser via idb-keyval - * Creates a probably unique indexed db per instance to ensure that the - * blocks iteration method only returns blocks from this invocation, - * and so that the caller can destory it without affecting others. - */ -class IdbBlockStore extends blockstore_core_1.BaseBlockstore { - constructor() { - super(); - const dbName = `IdbBlockStore-${Date.now()}-${Math.random()}`; - this.store = idb.createStore(dbName, `IdbBlockStore`); - } - async *blocks() { - const keys = await idb.keys(this.store); - for await (const key of keys) { - yield { - cid: multiformats_1.CID.parse(key.toString()), - bytes: await idb.get(key, this.store) - }; - } - } - async put(cid, bytes) { - await idb.set(cid.toString(), bytes, this.store); - } - async get(cid) { - const bytes = await idb.get(cid.toString(), this.store); - if (!bytes) { - throw new Error(`block with cid ${cid.toString()} no found`); - } - return bytes; - } - async has(cid) { - const bytes = await idb.get(cid.toString(), this.store); - return Boolean(bytes); - } - async close() { - return idb.clear(this.store); - } -} -exports.IdbBlockStore = IdbBlockStore; diff --git a/dist/cjs/blockstore/index.js b/dist/cjs/blockstore/index.js deleted file mode 100644 index ce03781..0000000 --- a/dist/cjs/blockstore/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/dist/cjs/blockstore/memory.js b/dist/cjs/blockstore/memory.js deleted file mode 100644 index 63e3a2a..0000000 --- a/dist/cjs/blockstore/memory.js +++ /dev/null @@ -1,35 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.MemoryBlockStore = void 0; -const multiformats_1 = require("multiformats"); -const blockstore_core_1 = require("blockstore-core"); -class MemoryBlockStore extends blockstore_core_1.BaseBlockstore { - constructor() { - super(); - this.store = new Map(); - } - async *blocks() { - for (const [cidStr, bytes] of this.store.entries()) { - yield { cid: multiformats_1.CID.parse(cidStr), bytes }; - } - } - put(cid, bytes) { - this.store.set(cid.toString(), bytes); - return Promise.resolve(); - } - get(cid) { - const bytes = this.store.get(cid.toString()); - if (!bytes) { - throw new Error(`block with cid ${cid.toString()} no found`); - } - return Promise.resolve(bytes); - } - has(cid) { - return Promise.resolve(this.store.has(cid.toString())); - } - close() { - this.store.clear(); - return Promise.resolve(); - 
} -} -exports.MemoryBlockStore = MemoryBlockStore; diff --git a/dist/cjs/cli/cli.js b/dist/cjs/cli/cli.js deleted file mode 100644 index 549d27f..0000000 --- a/dist/cjs/cli/cli.js +++ /dev/null @@ -1,131 +0,0 @@ -#!/usr/bin/env node -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const meow_1 = __importDefault(require("meow")); -const multiformats_1 = require("multiformats"); -const fs_1 = require("../pack/fs"); -const fs_2 = require("../unpack/fs"); -const lib_1 = require("./lib"); -const options = { - flags: { - output: { - type: 'string', - alias: 'o', - }, - pack: { - type: 'string', - alias: 'p' - }, - unpack: { - type: 'string', - alias: 'u', - }, - root: { - type: 'string', - isMultiple: true - }, - list: { - type: 'string', - alias: 'l', - }, - listCids: { - type: 'string' - }, - listRoots: { - type: 'string' - }, - wrapWithDirectory: { - type: 'boolean', - alias: 'w', - default: true - } - } -}; -const cli = (0, meow_1.default)(` - Content Addressable archives (CAR) store IPLD block data as a sequence of bytes; - typically in a file with a .car extension. The CAR format is a serialized - representation of any IPLD DAG (graph) as the concatenation of its blocks, plus - a header that describes the graphs in the file (via root CIDs). - - See: https://github.com/ipld/specs/blob/master/block-layer/content-addressable-archives.md - - Packing files into a .car - - # write a content addressed archive to the current working dir. - $ ipfs-car --pack path/to/file/or/dir - - # specify the car file name. - $ ipfs-car --pack path/to/files --output path/to/write/a.car - - # pack files without wrapping with top-level directory - $ ipfs-car --wrapWithDirectory false --pack path/to/files --output path/to/write/a.car - - Unpacking files from a .car - - # write 1 or more files to the current working dir. - $ ipfs-car --unpack path/to/my.car - - # unpack files to a specific path. - $ ipfs-car --unpack path/to/my.car --output /path/to/unpack/files/to - - # unpack specific roots - $ ipfs-car --unpack path/to/my.car --root <cid> [--root <cid>] - - # unpack files from a .car on stdin. - $ cat path/to/my.car | ipfs-car --unpack - - Listing the contents of a .car - - # list the cids for all the blocks. - $ ipfs-car --list-cids path/to/my.car - - # list the cid roots. - $ ipfs-car --list-roots path/to/my.car - - # list the files. 
- $ ipfs-car --list path/to/my.car - - TL;DR - --pack <path> --output <path> - --unpack <car> --output <path> - -`, options); -async function handleInput({ flags }) { - if (flags.pack) { - const { root, filename } = await (0, fs_1.packToFs)({ input: flags.pack, output: flags.output, wrapWithDirectory: flags.wrapWithDirectory }); - // tslint:disable-next-line: no-console - console.log(`root CID: ${root.toString()}`); - // tslint:disable-next-line: no-console - console.log(` output: ${filename}`); - } - else if (flags.unpack !== undefined) { - const roots = (flags.root || []).map(r => multiformats_1.CID.parse(r)); - if (flags.unpack === '') { - return (0, fs_2.unpackStreamToFs)({ input: process.stdin, roots, output: flags.output }); - } - return (0, fs_2.unpackToFs)({ input: flags.unpack, roots, output: flags.output }); - } - else if (flags.list) { - return (0, lib_1.listFilesInCar)({ input: flags.list }); - } - else if (flags.listRoots) { - return (0, lib_1.listRootsInCar)({ input: flags.listRoots }); - } - else if (flags.listCids) { - return (0, lib_1.listCidsInCar)({ input: flags.listCids }); - } - else if (!process.stdin.isTTY) { - // maybe stream? - // tslint:disable-next-line: no-console - console.log('Reading .car from stdin'); - return (0, fs_2.unpackStreamToFs)({ input: process.stdin, output: flags.output }); - } - else { - cli.showHelp(); - throw new Error('--pack or --unpack flag required'); - } -} -handleInput(cli); diff --git a/dist/cjs/cli/lib.js b/dist/cjs/cli/lib.js deleted file mode 100644 index a1429d9..0000000 --- a/dist/cjs/cli/lib.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.listRootsInCar = exports.listCidsInCar = exports.listFilesInCar = void 0; -const fs_1 = __importDefault(require("fs")); -const car_1 = require("@ipld/car"); -const unpack_1 = require("../unpack"); -async function listFilesInCar({ input }) { - const carReader = await car_1.CarIndexedReader.fromFile(input); - for await (const file of (0, unpack_1.unpack)(carReader)) { - // tslint:disable-next-line: no-console - console.log(file.path); - } -} -exports.listFilesInCar = listFilesInCar; -async function listCidsInCar({ input }) { - const carIterator = await car_1.CarCIDIterator.fromIterable(fs_1.default.createReadStream(input)); - for await (const cid of carIterator) { - // tslint:disable-next-line: no-console - console.log(cid.toString()); - } -} -exports.listCidsInCar = listCidsInCar; -async function listRootsInCar({ input }) { - const carIterator = await car_1.CarCIDIterator.fromIterable(fs_1.default.createReadStream(input)); - const roots = await carIterator.getRoots(); - for (const root of roots) { - // tslint:disable-next-line: no-console - console.log(root.toString()); - } -} -exports.listRootsInCar = listRootsInCar; diff --git a/dist/cjs/pack/blob.js b/dist/cjs/pack/blob.js deleted file mode 100644 index 9a32f44..0000000 --- a/dist/cjs/pack/blob.js +++ /dev/null @@ -1,32 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.packToBlob = void 0; -const blob_1 = require("@web-std/blob"); -const it_all_1 = __importDefault(require("it-all")); -const memory_1 = require("../blockstore/memory"); -const index_1 = require("./index"); -async function packToBlob({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }) { - const blockstore = userBlockstore ? userBlockstore : new memory_1.MemoryBlockStore(); - const { root, out } = await (0, index_1.pack)({ - input, - blockstore, - hasher, - maxChunkSize, - maxChildrenPerNode, - wrapWithDirectory, - rawLeaves - }); - const carParts = await (0, it_all_1.default)(out); - if (!userBlockstore) { - await blockstore.close(); - } - const car = new blob_1.Blob(carParts, { - // https://www.iana.org/assignments/media-types/application/vnd.ipld.car - type: 'application/vnd.ipld.car', - }); - return { root, car }; -} -exports.packToBlob = packToBlob; diff --git a/dist/cjs/pack/constants.js b/dist/cjs/pack/constants.js deleted file mode 100644 index 2c69cc8..0000000 --- a/dist/cjs/pack/constants.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.unixfsImporterOptionsDefault = void 0; -const sha2_1 = require("multiformats/hashes/sha2"); -exports.unixfsImporterOptionsDefault = { - cidVersion: 1, - chunker: 'fixed', - maxChunkSize: 262144, - hasher: sha2_1.sha256, - rawLeaves: true, - wrapWithDirectory: true, - maxChildrenPerNode: 174 -}; diff --git a/dist/cjs/pack/fs.js b/dist/cjs/pack/fs.js deleted file mode 100644 index 6f9c9b0..0000000 --- a/dist/cjs/pack/fs.js +++ /dev/null @@ -1,39 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.packToFs = void 0; -const fs_1 = __importDefault(require("fs")); -const os_1 = __importDefault(require("os")); -const path_1 = __importDefault(require("path")); -const move_file_1 = __importDefault(require("move-file")); -const stream_1 = require("./stream"); -const fs_2 = require("../blockstore/fs"); -async function packToFs({ input, output, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }) { - const blockstore = userBlockstore ? userBlockstore : new fs_2.FsBlockStore(); - const location = output || `${os_1.default.tmpdir()}/${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}`; - const writable = fs_1.default.createWriteStream(location); - const { root } = await (0, stream_1.packToStream)({ - input, - writable, - blockstore, - hasher, - maxChunkSize, - maxChildrenPerNode, - wrapWithDirectory, - rawLeaves - }); - if (!userBlockstore) { - await blockstore.close(); - } - // Move to work dir - if (!output) { - const basename = typeof input === 'string' ? 
path_1.default.parse(path_1.default.basename(input)).name : root.toString(); - const filename = `${basename}.car`; - await (0, move_file_1.default)(location, `${process.cwd()}/${filename}`); - return { root, filename }; - } - return { root, filename: output }; -} -exports.packToFs = packToFs; diff --git a/dist/cjs/pack/index.js b/dist/cjs/pack/index.js deleted file mode 100644 index c1f7b6b..0000000 --- a/dist/cjs/pack/index.js +++ /dev/null @@ -1,67 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.pack = void 0; -const it_last_1 = __importDefault(require("it-last")); -const it_pipe_1 = __importDefault(require("it-pipe")); -const car_1 = require("@ipld/car"); -const ipfs_unixfs_importer_1 = require("ipfs-unixfs-importer"); -const normalise_input_1 = require("./utils/normalise-input"); -const memory_1 = require("../blockstore/memory"); -const constants_1 = require("./constants"); -async function pack({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }) { - if (!input || (Array.isArray(input) && !input.length)) { - throw new Error('missing input file(s)'); - } - const blockstore = userBlockstore ? userBlockstore : new memory_1.MemoryBlockStore(); - // Consume the source - const rootEntry = await (0, it_last_1.default)((0, it_pipe_1.default)((0, normalise_input_1.getNormaliser)(input), (source) => (0, ipfs_unixfs_importer_1.importer)(source, blockstore, { - ...constants_1.unixfsImporterOptionsDefault, - hasher: hasher || constants_1.unixfsImporterOptionsDefault.hasher, - maxChunkSize: maxChunkSize || constants_1.unixfsImporterOptionsDefault.maxChunkSize, - maxChildrenPerNode: maxChildrenPerNode || constants_1.unixfsImporterOptionsDefault.maxChildrenPerNode, - wrapWithDirectory: wrapWithDirectory === false ? false : constants_1.unixfsImporterOptionsDefault.wrapWithDirectory, - rawLeaves: rawLeaves == null ? 
constants_1.unixfsImporterOptionsDefault.rawLeaves : rawLeaves, - cidVersion: cidVersion || constants_1.unixfsImporterOptionsDefault.cidVersion - }))); - if (!rootEntry || !rootEntry.cid) { - throw new Error('given input could not be parsed correctly'); - } - const root = rootEntry.cid; - const { writer, out: carOut } = await car_1.CarWriter.create([root]); - const carOutIter = carOut[Symbol.asyncIterator](); - let writingPromise; - const writeAll = async () => { - for await (const block of blockstore.blocks()) { - // `await` will block until all bytes in `carOut` are consumed by the user - // so we have backpressure here - await writer.put(block); - } - await writer.close(); - if (!userBlockstore) { - await blockstore.close(); - } - }; - const out = { - [Symbol.asyncIterator]() { - if (writingPromise != null) { - throw new Error('Multiple iterator not supported'); - } - // don't start writing until the user starts consuming the iterator - writingPromise = writeAll(); - return { - async next() { - const result = await carOutIter.next(); - if (result.done) { - await writingPromise; // any errors will propagate from here - } - return result; - } - }; - } - }; - return { root, out }; -} -exports.pack = pack; diff --git a/dist/cjs/pack/stream.js b/dist/cjs/pack/stream.js deleted file mode 100644 index f704838..0000000 --- a/dist/cjs/pack/stream.js +++ /dev/null @@ -1,72 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.packToStream = void 0; -const fs_1 = __importDefault(require("fs")); -const path_1 = __importDefault(require("path")); -const stream_1 = require("stream"); -const it_last_1 = __importDefault(require("it-last")); -const it_pipe_1 = __importDefault(require("it-pipe")); -const car_1 = require("@ipld/car"); -const ipfs_unixfs_importer_1 = require("ipfs-unixfs-importer"); -const normalise_input_multiple_1 = require("ipfs-core-utils/files/normalise-input-multiple"); -const glob_source_js_1 = __importDefault(require("ipfs-utils/src/files/glob-source.js")); -const memory_1 = require("../blockstore/memory"); -const constants_1 = require("./constants"); -// Node version of toCar with Node Stream Writable -async function packToStream({ input, writable, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }) { - if (!input || (Array.isArray(input) && !input.length)) { - throw new Error('given input could not be parsed correctly'); - } - input = typeof input === 'string' ? [input] : input; - const blockstore = userBlockstore ? userBlockstore : new memory_1.MemoryBlockStore(); - // Consume the source - const rootEntry = await (0, it_last_1.default)((0, it_pipe_1.default)(legacyGlobSource(input), source => (0, normalise_input_multiple_1.normaliseInput)(source), (source) => (0, ipfs_unixfs_importer_1.importer)(source, blockstore, { - ...constants_1.unixfsImporterOptionsDefault, - hasher: hasher || constants_1.unixfsImporterOptionsDefault.hasher, - maxChunkSize: maxChunkSize || constants_1.unixfsImporterOptionsDefault.maxChunkSize, - maxChildrenPerNode: maxChildrenPerNode || constants_1.unixfsImporterOptionsDefault.maxChildrenPerNode, - wrapWithDirectory: wrapWithDirectory === false ? false : constants_1.unixfsImporterOptionsDefault.wrapWithDirectory, - rawLeaves: rawLeaves == null ? 
constants_1.unixfsImporterOptionsDefault.rawLeaves : rawLeaves, - cidVersion: cidVersion || constants_1.unixfsImporterOptionsDefault.cidVersion - }))); - if (!rootEntry || !rootEntry.cid) { - throw new Error('given input could not be parsed correctly'); - } - const root = rootEntry.cid; - const { writer, out } = await car_1.CarWriter.create([root]); - stream_1.Readable.from(out).pipe(writable); - for await (const block of blockstore.blocks()) { - await writer.put(block); - } - await writer.close(); - if (!userBlockstore) { - await blockstore.close(); - } - return { root }; -} -exports.packToStream = packToStream; -/** - * This function replicates the old behaviour of globSource to not introduce a - * breaking change. - * - * TODO: figure out what the breaking change will be. - */ -async function* legacyGlobSource(input) { - for await (const p of input) { - const resolvedPath = path_1.default.resolve(p); - const stat = await fs_1.default.promises.stat(resolvedPath); - const fileName = path_1.default.basename(resolvedPath); - if (stat.isDirectory()) { - yield { path: fileName }; - for await (const candidate of (0, glob_source_js_1.default)(resolvedPath, '**/*')) { - yield { ...candidate, path: path_1.default.join(fileName, candidate.path) }; - } - } - else { - yield { path: fileName, content: fs_1.default.createReadStream(resolvedPath) }; - } - } -} diff --git a/dist/cjs/pack/utils/normalise-input.js b/dist/cjs/pack/utils/normalise-input.js deleted file mode 100644 index 020dc53..0000000 --- a/dist/cjs/pack/utils/normalise-input.js +++ /dev/null @@ -1,28 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getNormaliser = void 0; -const normalise_input_single_1 = require("ipfs-core-utils/files/normalise-input-single"); -const normalise_input_multiple_1 = require("ipfs-core-utils/files/normalise-input-multiple"); -function isBytes(obj) { - return ArrayBuffer.isView(obj) || obj instanceof ArrayBuffer; -} -function isBlob(obj) { - return Boolean(obj.constructor) && - (obj.constructor.name === 'Blob' || obj.constructor.name === 'File') && - typeof obj.stream === 'function'; -} -function isSingle(input) { - return typeof input === 'string' || input instanceof String || isBytes(input) || isBlob(input) || '_readableState' in input; -} -/** - * Get a single or multiple normaliser depending on the input. - */ -function getNormaliser(input) { - if (isSingle(input)) { - return (0, normalise_input_single_1.normaliseInput)(input); - } - else { - return (0, normalise_input_multiple_1.normaliseInput)(input); - } -} -exports.getNormaliser = getNormaliser; diff --git a/dist/cjs/unpack/fs.js b/dist/cjs/unpack/fs.js deleted file mode 100644 index 39efa0f..0000000 --- a/dist/cjs/unpack/fs.js +++ /dev/null @@ -1,51 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.writeFiles = exports.unpackStreamToFs = exports.unpackToFs = void 0; -const fs_1 = __importDefault(require("fs")); -const it_pipe_1 = __importDefault(require("it-pipe")); -const streaming_iterables_1 = require("streaming-iterables"); -const car_1 = require("@ipld/car"); -const fs_2 = require("../blockstore/fs"); -// @ts-ignore stream-to-it has no types exported -const stream_to_it_1 = __importDefault(require("stream-to-it")); -const index_1 = require("./index"); -// Node only, read a car from fs, write files to fs -async function unpackToFs({ input, roots, output }) { - const carReader = await car_1.CarIndexedReader.fromFile(input); - await writeFiles((0, index_1.unpack)(carReader, roots), output); -} -exports.unpackToFs = unpackToFs; -// Node only, read a stream, write files to fs -async function unpackStreamToFs({ input, roots, output, blockstore: userBlockstore }) { - const blockstore = userBlockstore ? userBlockstore : new fs_2.FsBlockStore(); - await writeFiles((0, index_1.unpackStream)(input, { roots, blockstore }), output); - if (!userBlockstore) { - await blockstore.close(); - } -} -exports.unpackStreamToFs = unpackStreamToFs; -async function writeFiles(source, output) { - for await (const file of source) { - let filePath = file.path; - // output overrides the first part of the path. - if (output) { - const parts = file.path.split('/'); - parts[0] = output; - filePath = parts.join('/'); - } - if (file.type === 'file' || file.type === 'raw') { - await (0, it_pipe_1.default)(file.content, (0, streaming_iterables_1.map)((chunk) => chunk.slice()), // BufferList to Buffer - stream_to_it_1.default.sink(fs_1.default.createWriteStream(filePath))); - } - else if (file.type === 'directory') { - await fs_1.default.promises.mkdir(filePath, { recursive: true }); - } - else { - throw new Error(`Unsupported UnixFS type ${file.type} for ${file.path}`); - } - } -} -exports.writeFiles = writeFiles; diff --git a/dist/cjs/unpack/index.js b/dist/cjs/unpack/index.js deleted file mode 100644 index a6132a9..0000000 --- a/dist/cjs/unpack/index.js +++ /dev/null @@ -1,47 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.unpackStream = exports.unpack = void 0; -const browser_readablestream_to_it_1 = __importDefault(require("browser-readablestream-to-it")); -const iterator_1 = require("@ipld/car/iterator"); -const ipfs_unixfs_exporter_1 = require("ipfs-unixfs-exporter"); -const verifying_get_only_blockstore_1 = require("./utils/verifying-get-only-blockstore"); -const memory_1 = require("../blockstore/memory"); -// Export unixfs entries from car file -async function* unpack(carReader, roots) { - const verifyingBlockService = verifying_get_only_blockstore_1.VerifyingGetOnlyBlockStore.fromCarReader(carReader); - if (!roots || roots.length === 0) { - roots = await carReader.getRoots(); - } - for (const root of roots) { - yield* (0, ipfs_unixfs_exporter_1.recursive)(root, verifyingBlockService, { /* options */}); - } -} -exports.unpack = unpack; -async function* unpackStream(readable, { roots, blockstore: userBlockstore } = {}) { - const carIterator = await iterator_1.CarBlockIterator.fromIterable(asAsyncIterable(readable)); - const blockstore = userBlockstore || new memory_1.MemoryBlockStore(); - for await (const block of carIterator) { - await blockstore.put(block.cid, block.bytes); - } - const verifyingBlockStore = verifying_get_only_blockstore_1.VerifyingGetOnlyBlockStore.fromBlockstore(blockstore); - if (!roots || roots.length === 0) { - roots = await carIterator.getRoots(); - } - for (const root of roots) { - yield* (0, ipfs_unixfs_exporter_1.recursive)(root, verifyingBlockStore); - } -} -exports.unpackStream = unpackStream; -/** - * Upgrade a ReadableStream to an AsyncIterable if it isn't already - * - * ReadableStream (e.g res.body) is asyncIterable in node, but not in chrome, yet. - * see: https://bugs.chromium.org/p/chromium/issues/detail?id=929585 - */ -function asAsyncIterable(readable) { - // @ts-ignore how to convince tsc that we are checking the type here? - return Symbol.asyncIterator in readable ? readable : (0, browser_readablestream_to_it_1.default)(readable); -} diff --git a/dist/cjs/unpack/utils/verifying-get-only-blockstore.js b/dist/cjs/unpack/utils/verifying-get-only-blockstore.js deleted file mode 100644 index 0fc0b4c..0000000 --- a/dist/cjs/unpack/utils/verifying-get-only-blockstore.js +++ /dev/null @@ -1,39 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.VerifyingGetOnlyBlockStore = void 0; -const equals_1 = require("uint8arrays/equals"); -const sha2_1 = require("multiformats/hashes/sha2"); -const blockstore_core_1 = require("blockstore-core"); -class VerifyingGetOnlyBlockStore extends blockstore_core_1.BaseBlockstore { - constructor(blockstore) { - super(); - this.store = blockstore; - } - async get(cid) { - const res = await this.store.get(cid); - if (!res) { - throw new Error(`Incomplete CAR. Block missing for CID ${cid}`); - } - if (!isValid({ cid, bytes: res })) { - throw new Error(`Invalid CAR. Hash of block data does not match CID ${cid}`); - } - return res; - } - static fromBlockstore(b) { - return new VerifyingGetOnlyBlockStore(b); - } - static fromCarReader(cr) { - return new VerifyingGetOnlyBlockStore({ - // Return bytes in the same fashion as a Blockstore implementation - get: async (cid) => { - const block = await cr.get(cid); - return block === null || block === void 0 ? 
void 0 : block.bytes; - } - }); - } -} -exports.VerifyingGetOnlyBlockStore = VerifyingGetOnlyBlockStore; -async function isValid({ cid, bytes }) { - const hash = await sha2_1.sha256.digest(bytes); - return (0, equals_1.equals)(hash.digest, cid.multihash.digest); -} diff --git a/dist/esm/blockstore/fs.js b/dist/esm/blockstore/fs.js deleted file mode 100644 index cdb0540..0000000 --- a/dist/esm/blockstore/fs.js +++ /dev/null @@ -1,69 +0,0 @@ -import fs from 'fs'; -import os from 'os'; -import { CID } from 'multiformats'; -import { BaseBlockstore } from 'blockstore-core'; -export class FsBlockStore extends BaseBlockstore { - constructor() { - super(); - this.path = `${os.tmpdir()}/${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}`; - this._opened = false; - } - async _open() { - if (this._opening) { - await this._opening; - } - else { - this._opening = fs.promises.mkdir(this.path); - await this._opening; - this._opened = true; - } - } - async put(cid, bytes) { - if (!this._opened) { - await this._open(); - } - const cidStr = cid.toString(); - const location = `${this.path}/${cidStr}`; - await fs.promises.writeFile(location, bytes); - } - async get(cid) { - if (!this._opened) { - await this._open(); - } - const cidStr = cid.toString(); - const location = `${this.path}/${cidStr}`; - const bytes = await fs.promises.readFile(location); - return bytes; - } - async has(cid) { - if (!this._opened) { - await this._open(); - } - const cidStr = cid.toString(); - const location = `${this.path}/${cidStr}`; - try { - await fs.promises.access(location); - return true; - } - catch (err) { - return false; - } - } - async *blocks() { - if (!this._opened) { - await this._open(); - } - const cids = await fs.promises.readdir(this.path); - for (const cidStr of cids) { - const location = `${this.path}/${cidStr}`; - const bytes = await fs.promises.readFile(location); - yield { cid: CID.parse(cidStr), bytes }; - } - } - async close() { - if (this._opened) { - await fs.promises.rm(this.path, { recursive: true }); - } - this._opened = false; - } -} diff --git a/dist/esm/blockstore/idb.js b/dist/esm/blockstore/idb.js deleted file mode 100644 index 89f34cf..0000000 --- a/dist/esm/blockstore/idb.js +++ /dev/null @@ -1,42 +0,0 @@ -import * as idb from 'idb-keyval'; -import { CID } from 'multiformats'; -import { BaseBlockstore } from 'blockstore-core'; -/** - * Save blocks to IndexedDB in the browser via idb-keyval - * Creates a probably unique indexed db per instance to ensure that the - * blocks iteration method only returns blocks from this invocation, - * and so that the caller can destory it without affecting others. 
- */ -export class IdbBlockStore extends BaseBlockstore { - constructor() { - super(); - const dbName = `IdbBlockStore-${Date.now()}-${Math.random()}`; - this.store = idb.createStore(dbName, `IdbBlockStore`); - } - async *blocks() { - const keys = await idb.keys(this.store); - for await (const key of keys) { - yield { - cid: CID.parse(key.toString()), - bytes: await idb.get(key, this.store) - }; - } - } - async put(cid, bytes) { - await idb.set(cid.toString(), bytes, this.store); - } - async get(cid) { - const bytes = await idb.get(cid.toString(), this.store); - if (!bytes) { - throw new Error(`block with cid ${cid.toString()} no found`); - } - return bytes; - } - async has(cid) { - const bytes = await idb.get(cid.toString(), this.store); - return Boolean(bytes); - } - async close() { - return idb.clear(this.store); - } -} diff --git a/dist/esm/blockstore/index.js b/dist/esm/blockstore/index.js deleted file mode 100644 index 509db18..0000000 --- a/dist/esm/blockstore/index.js +++ /dev/null @@ -1 +0,0 @@ -export {}; diff --git a/dist/esm/blockstore/memory.js b/dist/esm/blockstore/memory.js deleted file mode 100644 index aef5699..0000000 --- a/dist/esm/blockstore/memory.js +++ /dev/null @@ -1,31 +0,0 @@ -import { CID } from 'multiformats'; -import { BaseBlockstore } from 'blockstore-core'; -export class MemoryBlockStore extends BaseBlockstore { - constructor() { - super(); - this.store = new Map(); - } - async *blocks() { - for (const [cidStr, bytes] of this.store.entries()) { - yield { cid: CID.parse(cidStr), bytes }; - } - } - put(cid, bytes) { - this.store.set(cid.toString(), bytes); - return Promise.resolve(); - } - get(cid) { - const bytes = this.store.get(cid.toString()); - if (!bytes) { - throw new Error(`block with cid ${cid.toString()} no found`); - } - return Promise.resolve(bytes); - } - has(cid) { - return Promise.resolve(this.store.has(cid.toString())); - } - close() { - this.store.clear(); - return Promise.resolve(); - } -} diff --git a/dist/esm/cli/cli.js b/dist/esm/cli/cli.js deleted file mode 100644 index dbd1783..0000000 --- a/dist/esm/cli/cli.js +++ /dev/null @@ -1,126 +0,0 @@ -#!/usr/bin/env node -import meow from 'meow'; -import { CID } from 'multiformats'; -import { packToFs } from "../pack/fs.js"; -import { unpackToFs, unpackStreamToFs } from "../unpack/fs.js"; -import { listFilesInCar, listCidsInCar, listRootsInCar } from "./lib.js"; -const options = { - flags: { - output: { - type: 'string', - alias: 'o', - }, - pack: { - type: 'string', - alias: 'p' - }, - unpack: { - type: 'string', - alias: 'u', - }, - root: { - type: 'string', - isMultiple: true - }, - list: { - type: 'string', - alias: 'l', - }, - listCids: { - type: 'string' - }, - listRoots: { - type: 'string' - }, - wrapWithDirectory: { - type: 'boolean', - alias: 'w', - default: true - } - } -}; -const cli = meow(` - Content Addressable archives (CAR) store IPLD block data as a sequence of bytes; - typically in a file with a .car extension. The CAR format is a serialized - representation of any IPLD DAG (graph) as the concatenation of its blocks, plus - a header that describes the graphs in the file (via root CIDs). - - See: https://github.com/ipld/specs/blob/master/block-layer/content-addressable-archives.md - - Packing files into a .car - - # write a content addressed archive to the current working dir. - $ ipfs-car --pack path/to/file/or/dir - - # specify the car file name. 
- $ ipfs-car --pack path/to/files --output path/to/write/a.car - - # pack files without wrapping with top-level directory - $ ipfs-car --wrapWithDirectory false --pack path/to/files --output path/to/write/a.car - - Unpacking files from a .car - - # write 1 or more files to the current working dir. - $ ipfs-car --unpack path/to/my.car - - # unpack files to a specific path. - $ ipfs-car --unpack path/to/my.car --output /path/to/unpack/files/to - - # unpack specific roots - $ ipfs-car --unpack path/to/my.car --root <cid> [--root <cid>] - - # unpack files from a .car on stdin. - $ cat path/to/my.car | ipfs-car --unpack - - Listing the contents of a .car - - # list the cids for all the blocks. - $ ipfs-car --list-cids path/to/my.car - - # list the cid roots. - $ ipfs-car --list-roots path/to/my.car - - # list the files. - $ ipfs-car --list path/to/my.car - - TL;DR - --pack <path> --output <path> - --unpack <car> --output <path> - -`, options); -async function handleInput({ flags }) { - if (flags.pack) { - const { root, filename } = await packToFs({ input: flags.pack, output: flags.output, wrapWithDirectory: flags.wrapWithDirectory }); - // tslint:disable-next-line: no-console - console.log(`root CID: ${root.toString()}`); - // tslint:disable-next-line: no-console - console.log(` output: ${filename}`); - } - else if (flags.unpack !== undefined) { - const roots = (flags.root || []).map(r => CID.parse(r)); - if (flags.unpack === '') { - return unpackStreamToFs({ input: process.stdin, roots, output: flags.output }); - } - return unpackToFs({ input: flags.unpack, roots, output: flags.output }); - } - else if (flags.list) { - return listFilesInCar({ input: flags.list }); - } - else if (flags.listRoots) { - return listRootsInCar({ input: flags.listRoots }); - } - else if (flags.listCids) { - return listCidsInCar({ input: flags.listCids }); - } - else if (!process.stdin.isTTY) { - // maybe stream? 
- // tslint:disable-next-line: no-console - console.log('Reading .car from stdin'); - return unpackStreamToFs({ input: process.stdin, output: flags.output }); - } - else { - cli.showHelp(); - throw new Error('--pack or --unpack flag required'); - } -} -handleInput(cli); diff --git a/dist/esm/cli/lib.js b/dist/esm/cli/lib.js deleted file mode 100644 index c5fd6bf..0000000 --- a/dist/esm/cli/lib.js +++ /dev/null @@ -1,25 +0,0 @@ -import fs from 'fs'; -import { CarIndexedReader, CarCIDIterator } from '@ipld/car'; -import { unpack } from "../unpack.js"; -export async function listFilesInCar({ input }) { - const carReader = await CarIndexedReader.fromFile(input); - for await (const file of unpack(carReader)) { - // tslint:disable-next-line: no-console - console.log(file.path); - } -} -export async function listCidsInCar({ input }) { - const carIterator = await CarCIDIterator.fromIterable(fs.createReadStream(input)); - for await (const cid of carIterator) { - // tslint:disable-next-line: no-console - console.log(cid.toString()); - } -} -export async function listRootsInCar({ input }) { - const carIterator = await CarCIDIterator.fromIterable(fs.createReadStream(input)); - const roots = await carIterator.getRoots(); - for (const root of roots) { - // tslint:disable-next-line: no-console - console.log(root.toString()); - } -} diff --git a/dist/esm/pack/blob.js b/dist/esm/pack/blob.js deleted file mode 100644 index 850e43e..0000000 --- a/dist/esm/pack/blob.js +++ /dev/null @@ -1,25 +0,0 @@ -import { Blob } from "@web-std/blob"; -import all from 'it-all'; -import { MemoryBlockStore } from "../blockstore/memory.js"; -import { pack } from "./index.js"; -export async function packToBlob({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }) { - const blockstore = userBlockstore ? userBlockstore : new MemoryBlockStore(); - const { root, out } = await pack({ - input, - blockstore, - hasher, - maxChunkSize, - maxChildrenPerNode, - wrapWithDirectory, - rawLeaves - }); - const carParts = await all(out); - if (!userBlockstore) { - await blockstore.close(); - } - const car = new Blob(carParts, { - // https://www.iana.org/assignments/media-types/application/vnd.ipld.car - type: 'application/vnd.ipld.car', - }); - return { root, car }; -} diff --git a/dist/esm/pack/constants.js b/dist/esm/pack/constants.js deleted file mode 100644 index 35e1a72..0000000 --- a/dist/esm/pack/constants.js +++ /dev/null @@ -1,10 +0,0 @@ -import { sha256 } from 'multiformats/hashes/sha2'; -export const unixfsImporterOptionsDefault = { - cidVersion: 1, - chunker: 'fixed', - maxChunkSize: 262144, - hasher: sha256, - rawLeaves: true, - wrapWithDirectory: true, - maxChildrenPerNode: 174 -}; diff --git a/dist/esm/pack/fs.js b/dist/esm/pack/fs.js deleted file mode 100644 index daa767b..0000000 --- a/dist/esm/pack/fs.js +++ /dev/null @@ -1,32 +0,0 @@ -import fs from 'fs'; -import os from 'os'; -import path from 'path'; -import moveFile from 'move-file'; -import { packToStream } from "./stream.js"; -import { FsBlockStore } from "../blockstore/fs.js"; -export async function packToFs({ input, output, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }) { - const blockstore = userBlockstore ? 
userBlockstore : new FsBlockStore(); - const location = output || `${os.tmpdir()}/${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}`; - const writable = fs.createWriteStream(location); - const { root } = await packToStream({ - input, - writable, - blockstore, - hasher, - maxChunkSize, - maxChildrenPerNode, - wrapWithDirectory, - rawLeaves - }); - if (!userBlockstore) { - await blockstore.close(); - } - // Move to work dir - if (!output) { - const basename = typeof input === 'string' ? path.parse(path.basename(input)).name : root.toString(); - const filename = `${basename}.car`; - await moveFile(location, `${process.cwd()}/${filename}`); - return { root, filename }; - } - return { root, filename: output }; -} diff --git a/dist/esm/pack/index.js b/dist/esm/pack/index.js deleted file mode 100644 index e8d3de2..0000000 --- a/dist/esm/pack/index.js +++ /dev/null @@ -1,60 +0,0 @@ -import last from 'it-last'; -import pipe from 'it-pipe'; -import { CarWriter } from '@ipld/car'; -import { importer } from 'ipfs-unixfs-importer'; -import { getNormaliser } from "./utils/normalise-input.js"; -import { MemoryBlockStore } from "../blockstore/memory.js"; -import { unixfsImporterOptionsDefault } from "./constants.js"; -export async function pack({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }) { - if (!input || (Array.isArray(input) && !input.length)) { - throw new Error('missing input file(s)'); - } - const blockstore = userBlockstore ? userBlockstore : new MemoryBlockStore(); - // Consume the source - const rootEntry = await last(pipe(getNormaliser(input), (source) => importer(source, blockstore, { - ...unixfsImporterOptionsDefault, - hasher: hasher || unixfsImporterOptionsDefault.hasher, - maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize, - maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode, - wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory, - rawLeaves: rawLeaves == null ? 
unixfsImporterOptionsDefault.rawLeaves : rawLeaves, - cidVersion: cidVersion || unixfsImporterOptionsDefault.cidVersion - }))); - if (!rootEntry || !rootEntry.cid) { - throw new Error('given input could not be parsed correctly'); - } - const root = rootEntry.cid; - const { writer, out: carOut } = await CarWriter.create([root]); - const carOutIter = carOut[Symbol.asyncIterator](); - let writingPromise; - const writeAll = async () => { - for await (const block of blockstore.blocks()) { - // `await` will block until all bytes in `carOut` are consumed by the user - // so we have backpressure here - await writer.put(block); - } - await writer.close(); - if (!userBlockstore) { - await blockstore.close(); - } - }; - const out = { - [Symbol.asyncIterator]() { - if (writingPromise != null) { - throw new Error('Multiple iterator not supported'); - } - // don't start writing until the user starts consuming the iterator - writingPromise = writeAll(); - return { - async next() { - const result = await carOutIter.next(); - if (result.done) { - await writingPromise; // any errors will propagate from here - } - return result; - } - }; - } - }; - return { root, out }; -} diff --git a/dist/esm/pack/stream.js b/dist/esm/pack/stream.js deleted file mode 100644 index e861558..0000000 --- a/dist/esm/pack/stream.js +++ /dev/null @@ -1,65 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import { Readable } from 'stream'; -import last from 'it-last'; -import pipe from 'it-pipe'; -import { CarWriter } from '@ipld/car'; -import { importer } from 'ipfs-unixfs-importer'; -import { normaliseInput } from 'ipfs-core-utils/files/normalise-input-multiple'; -import globSource from 'ipfs-utils/src/files/glob-source.js'; -import { MemoryBlockStore } from "../blockstore/memory.js"; -import { unixfsImporterOptionsDefault } from "./constants.js"; -// Node version of toCar with Node Stream Writable -export async function packToStream({ input, writable, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }) { - if (!input || (Array.isArray(input) && !input.length)) { - throw new Error('given input could not be parsed correctly'); - } - input = typeof input === 'string' ? [input] : input; - const blockstore = userBlockstore ? userBlockstore : new MemoryBlockStore(); - // Consume the source - const rootEntry = await last(pipe(legacyGlobSource(input), source => normaliseInput(source), (source) => importer(source, blockstore, { - ...unixfsImporterOptionsDefault, - hasher: hasher || unixfsImporterOptionsDefault.hasher, - maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize, - maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode, - wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory, - rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves, - cidVersion: cidVersion || unixfsImporterOptionsDefault.cidVersion - }))); - if (!rootEntry || !rootEntry.cid) { - throw new Error('given input could not be parsed correctly'); - } - const root = rootEntry.cid; - const { writer, out } = await CarWriter.create([root]); - Readable.from(out).pipe(writable); - for await (const block of blockstore.blocks()) { - await writer.put(block); - } - await writer.close(); - if (!userBlockstore) { - await blockstore.close(); - } - return { root }; -} -/** - * This function replicates the old behaviour of globSource to not introduce a - * breaking change. 
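- * Directories are yielded first as bare entries; their files follow with paths prefixed by the directory name.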
- * - * TODO: figure out what the breaking change will be. - */ -async function* legacyGlobSource(input) { - for await (const p of input) { - const resolvedPath = path.resolve(p); - const stat = await fs.promises.stat(resolvedPath); - const fileName = path.basename(resolvedPath); - if (stat.isDirectory()) { - yield { path: fileName }; - for await (const candidate of globSource(resolvedPath, '**/*')) { - yield { ...candidate, path: path.join(fileName, candidate.path) }; - } - } - else { - yield { path: fileName, content: fs.createReadStream(resolvedPath) }; - } - } -} diff --git a/dist/esm/pack/utils/normalise-input.js b/dist/esm/pack/utils/normalise-input.js deleted file mode 100644 index 0970645..0000000 --- a/dist/esm/pack/utils/normalise-input.js +++ /dev/null @@ -1,24 +0,0 @@ -import { normaliseInput as normaliseInputSingle } from 'ipfs-core-utils/files/normalise-input-single'; -import { normaliseInput as normaliseInputMultiple } from 'ipfs-core-utils/files/normalise-input-multiple'; -function isBytes(obj) { - return ArrayBuffer.isView(obj) || obj instanceof ArrayBuffer; -} -function isBlob(obj) { - return Boolean(obj.constructor) && - (obj.constructor.name === 'Blob' || obj.constructor.name === 'File') && - typeof obj.stream === 'function'; -} -function isSingle(input) { - return typeof input === 'string' || input instanceof String || isBytes(input) || isBlob(input) || '_readableState' in input; -} -/** - * Get a single or multiple normaliser depending on the input. - */ -export function getNormaliser(input) { - if (isSingle(input)) { - return normaliseInputSingle(input); - } - else { - return normaliseInputMultiple(input); - } -} diff --git a/dist/esm/package.json b/dist/esm/package.json deleted file mode 100644 index 4c32b01..0000000 --- a/dist/esm/package.json +++ /dev/null @@ -1 +0,0 @@ -'{ "type" : "module" }' diff --git a/dist/esm/unpack/fs.js b/dist/esm/unpack/fs.js deleted file mode 100644 index 5c2ece1..0000000 --- a/dist/esm/unpack/fs.js +++ /dev/null @@ -1,42 +0,0 @@ -import fs from 'fs'; -import pipe from 'it-pipe'; -import { map } from 'streaming-iterables'; -import { CarIndexedReader } from '@ipld/car'; -import { FsBlockStore } from "../blockstore/fs.js"; -// @ts-ignore stream-to-it has no types exported -import toIterable from 'stream-to-it'; -import { unpack, unpackStream } from "./index.js"; -// Node only, read a car from fs, write files to fs -export async function unpackToFs({ input, roots, output }) { - const carReader = await CarIndexedReader.fromFile(input); - await writeFiles(unpack(carReader, roots), output); -} -// Node only, read a stream, write files to fs -export async function unpackStreamToFs({ input, roots, output, blockstore: userBlockstore }) { - const blockstore = userBlockstore ? userBlockstore : new FsBlockStore(); - await writeFiles(unpackStream(input, { roots, blockstore }), output); - if (!userBlockstore) { - await blockstore.close(); - } -} -export async function writeFiles(source, output) { - for await (const file of source) { - let filePath = file.path; - // output overrides the first part of the path. 
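- // e.g. with output '/tmp/out', a file at 'root-dir/cat.png' is written to '/tmp/out/cat.png'.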
- if (output) { - const parts = file.path.split('/'); - parts[0] = output; - filePath = parts.join('/'); - } - if (file.type === 'file' || file.type === 'raw') { - await pipe(file.content, map((chunk) => chunk.slice()), // BufferList to Buffer - toIterable.sink(fs.createWriteStream(filePath))); - } - else if (file.type === 'directory') { - await fs.promises.mkdir(filePath, { recursive: true }); - } - else { - throw new Error(`Unsupported UnixFS type ${file.type} for ${file.path}`); - } - } -} diff --git a/dist/esm/unpack/index.js b/dist/esm/unpack/index.js deleted file mode 100644 index 184721c..0000000 --- a/dist/esm/unpack/index.js +++ /dev/null @@ -1,39 +0,0 @@ -import toIterable from 'browser-readablestream-to-it'; -import { CarBlockIterator } from '@ipld/car/iterator'; -import { recursive as unixFsExporter } from 'ipfs-unixfs-exporter'; -import { VerifyingGetOnlyBlockStore } from "./utils/verifying-get-only-blockstore.js"; -import { MemoryBlockStore } from "../blockstore/memory.js"; -// Export unixfs entries from car file -export async function* unpack(carReader, roots) { - const verifyingBlockService = VerifyingGetOnlyBlockStore.fromCarReader(carReader); - if (!roots || roots.length === 0) { - roots = await carReader.getRoots(); - } - for (const root of roots) { - yield* unixFsExporter(root, verifyingBlockService, { /* options */}); - } -} -export async function* unpackStream(readable, { roots, blockstore: userBlockstore } = {}) { - const carIterator = await CarBlockIterator.fromIterable(asAsyncIterable(readable)); - const blockstore = userBlockstore || new MemoryBlockStore(); - for await (const block of carIterator) { - await blockstore.put(block.cid, block.bytes); - } - const verifyingBlockStore = VerifyingGetOnlyBlockStore.fromBlockstore(blockstore); - if (!roots || roots.length === 0) { - roots = await carIterator.getRoots(); - } - for (const root of roots) { - yield* unixFsExporter(root, verifyingBlockStore); - } -} -/** - * Upgrade a ReadableStream to an AsyncIterable if it isn't already - * - * ReadableStream (e.g res.body) is asyncIterable in node, but not in chrome, yet. - * see: https://bugs.chromium.org/p/chromium/issues/detail?id=929585 - */ -function asAsyncIterable(readable) { - // @ts-ignore how to convince tsc that we are checking the type here? - return Symbol.asyncIterator in readable ? readable : toIterable(readable); -} diff --git a/dist/esm/unpack/utils/verifying-get-only-blockstore.js b/dist/esm/unpack/utils/verifying-get-only-blockstore.js deleted file mode 100644 index 4cf7856..0000000 --- a/dist/esm/unpack/utils/verifying-get-only-blockstore.js +++ /dev/null @@ -1,35 +0,0 @@ -import { equals } from 'uint8arrays/equals'; -import { sha256 } from 'multiformats/hashes/sha2'; -import { BaseBlockstore } from 'blockstore-core'; -export class VerifyingGetOnlyBlockStore extends BaseBlockstore { - constructor(blockstore) { - super(); - this.store = blockstore; - } - async get(cid) { - const res = await this.store.get(cid); - if (!res) { - throw new Error(`Incomplete CAR. Block missing for CID ${cid}`); - } - if (!isValid({ cid, bytes: res })) { - throw new Error(`Invalid CAR. Hash of block data does not match CID ${cid}`); - } - return res; - } - static fromBlockstore(b) { - return new VerifyingGetOnlyBlockStore(b); - } - static fromCarReader(cr) { - return new VerifyingGetOnlyBlockStore({ - // Return bytes in the same fashion as a Blockstore implementation - get: async (cid) => { - const block = await cr.get(cid); - return block === null || block === void 0 ? 
void 0 : block.bytes; - } - }); - } -} -async function isValid({ cid, bytes }) { - const hash = await sha256.digest(bytes); - return equals(hash.digest, cid.multihash.digest); -} diff --git a/dist/types/blockstore/fs.d.ts b/dist/types/blockstore/fs.d.ts deleted file mode 100644 index aad6299..0000000 --- a/dist/types/blockstore/fs.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -/// <reference types="node" /> -import { CID } from 'multiformats'; -import { BaseBlockstore } from 'blockstore-core'; -import { Blockstore } from './index'; -export declare class FsBlockStore extends BaseBlockstore implements Blockstore { - path: string; - _opened: boolean; - _opening?: Promise<void>; - constructor(); - _open(): Promise<void>; - put(cid: CID, bytes: Uint8Array): Promise<void>; - get(cid: CID): Promise<Uint8Array>; - has(cid: CID): Promise<boolean>; - blocks(): AsyncGenerator<{ - cid: CID; - bytes: Buffer; - }, void, unknown>; - close(): Promise<void>; -} diff --git a/dist/types/blockstore/idb.d.ts b/dist/types/blockstore/idb.d.ts deleted file mode 100644 index 94f35ea..0000000 --- a/dist/types/blockstore/idb.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { CID } from 'multiformats'; -import { BaseBlockstore } from 'blockstore-core'; -import { Blockstore } from './index'; -/** - * Save blocks to IndexedDB in the browser via idb-keyval - * Creates a probably unique indexed db per instance to ensure that the - * blocks iteration method only returns blocks from this invocation, - * and so that the caller can destroy it without affecting others. - */ -export declare class IdbBlockStore extends BaseBlockstore implements Blockstore { - private store; - constructor(); - blocks(): AsyncGenerator<{ - cid: CID; - bytes: any; - }, void, unknown>; - put(cid: CID, bytes: Uint8Array): Promise<void>; - get(cid: CID): Promise<Uint8Array>; - has(cid: CID): Promise<boolean>; - close(): Promise<void>; -} diff --git a/dist/types/blockstore/index.d.ts b/dist/types/blockstore/index.d.ts deleted file mode 100644 index b4b96d2..0000000 --- a/dist/types/blockstore/index.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { Block } from '@ipld/car/api'; -import type { Blockstore as IpfsBlockstore } from 'interface-blockstore'; -export interface Blockstore extends IpfsBlockstore { - blocks(): AsyncGenerator<Block, void, unknown>; -} diff --git a/dist/types/blockstore/memory.d.ts b/dist/types/blockstore/memory.d.ts deleted file mode 100644 index 104f17e..0000000 --- a/dist/types/blockstore/memory.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { CID } from 'multiformats'; -import { BaseBlockstore } from 'blockstore-core'; -import { Blockstore } from './index'; -export declare class MemoryBlockStore extends BaseBlockstore implements Blockstore { - store: Map<string, Uint8Array>; - constructor(); - blocks(): AsyncGenerator<{ - cid: CID; - bytes: Uint8Array; - }, void, unknown>; - put(cid: CID, bytes: Uint8Array): Promise<void>; - get(cid: CID): Promise<Uint8Array>; - has(cid: CID): Promise<boolean>; - close(): Promise<void>; -} diff --git a/dist/types/cli/cli.d.ts b/dist/types/cli/cli.d.ts deleted file mode 100644 index d1ebc9c..0000000 --- a/dist/types/cli/cli.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -#!/usr/bin/env node -export {}; diff --git a/dist/types/cli/lib.d.ts b/dist/types/cli/lib.d.ts deleted file mode 100644 index f538aba..0000000 --- a/dist/types/cli/lib.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -export declare function listFilesInCar({ input }: { - input: string; -}): Promise<void>; -export declare function listCidsInCar({ input }: { - input: string; -}): Promise<void>; -export declare function listRootsInCar({ input }: { - input: string; -}): Promise<void>; diff --git a/dist/types/pack/blob.d.ts b/dist/types/pack/blob.d.ts deleted file mode 100644
index a296b45..0000000 --- a/dist/types/pack/blob.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { ImportCandidateStream } from 'ipfs-core-types/src/utils'; -export type { ImportCandidateStream }; -import type { PackProperties } from './index'; -export declare function packToBlob({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }: PackProperties): Promise<{ - root: import("multiformats").CID; - car: Blob; -}>; diff --git a/dist/types/pack/constants.d.ts deleted file mode 100644 index 7181c9e..0000000 --- a/dist/types/pack/constants.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { UserImporterOptions } from 'ipfs-unixfs-importer/types/src/types'; -export declare const unixfsImporterOptionsDefault: UserImporterOptions; diff --git a/dist/types/pack/fs.d.ts deleted file mode 100644 index 9ba1319..0000000 --- a/dist/types/pack/fs.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import type { PackProperties } from './index'; -export interface PackToFsProperties extends PackProperties { - input: string | Iterable<string> | AsyncIterable<string>; - output?: string; -} -export declare function packToFs({ input, output, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }: PackToFsProperties): Promise<{ - root: import("multiformats").CID; - filename: string; -}>; diff --git a/dist/types/pack/index.d.ts deleted file mode 100644 index f3126e5..0000000 --- a/dist/types/pack/index.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -import type { ImportCandidateStream, ImportCandidate } from 'ipfs-core-types/src/utils'; -import type { MultihashHasher } from 'multiformats/hashes/interface'; -export type { ImportCandidateStream }; -import { Blockstore } from '../blockstore/index'; -import { CIDVersion } from "multiformats/types/src/cid"; -export interface PackProperties { - input: ImportCandidateStream | ImportCandidate; - blockstore?: Blockstore; - maxChunkSize?: number; - maxChildrenPerNode?: number; - wrapWithDirectory?: boolean; - hasher?: MultihashHasher; - /** - * Use raw codec for leaf nodes. Default: true. - */ - rawLeaves?: boolean; - cidVersion?: CIDVersion | undefined; -} -export declare function pack({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }: PackProperties): Promise<{ - root: import("multiformats/types/src/cid").CID; - out: AsyncIterable<Uint8Array>; -}>; diff --git a/dist/types/pack/stream.d.ts deleted file mode 100644 index d7b3c98..0000000 --- a/dist/types/pack/stream.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -/// <reference types="node" /> -import { Writable } from 'stream'; -import type { PackProperties } from './index'; -export interface PackToStreamProperties extends PackProperties { - input: string | Iterable<string> | AsyncIterable<string>; - writable: Writable; -} -export declare function packToStream({ input, writable, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }: PackToStreamProperties): Promise<{ - root: import("multiformats").CID; -}>; diff --git a/dist/types/pack/utils/normalise-input.d.ts deleted file mode 100644 index 4cc83f0..0000000 --- a/dist/types/pack/utils/normalise-input.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type { ImportCandidateStream, ImportCandidate } from 'ipfs-core-types/src/utils'; -/** - * Get a single or multiple normaliser depending on the input.
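- * Strings, bytes, Blobs/Files and Node streams count as a single candidate; anything else is normalised as multiple candidates.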
- */ -export declare function getNormaliser(input: ImportCandidateStream | ImportCandidate): AsyncGenerator; diff --git a/dist/types/unpack/fs.d.ts deleted file mode 100644 index 29f9a13..0000000 --- a/dist/types/unpack/fs.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { CID } from 'multiformats'; -import { UnixFSEntry } from 'ipfs-unixfs-exporter'; -import { Blockstore } from '../blockstore/index'; -export declare function unpackToFs({ input, roots, output }: { - input: string; - roots?: CID[]; - output?: string; -}): Promise<void>; -export declare function unpackStreamToFs({ input, roots, output, blockstore: userBlockstore }: { - input: AsyncIterable<Uint8Array>; - roots?: CID[]; - output?: string; - blockstore?: Blockstore; -}): Promise<void>; -export declare function writeFiles(source: AsyncIterable<UnixFSEntry>, output?: string): Promise<void>; diff --git a/dist/types/unpack/index.d.ts deleted file mode 100644 index 1e2fc2c..0000000 --- a/dist/types/unpack/index.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { CarReader } from '@ipld/car/api'; -import { CID } from 'multiformats'; -import type { UnixFSEntry } from 'ipfs-unixfs-exporter'; -export type { UnixFSEntry }; -import { Blockstore } from '../blockstore/index'; -export declare function unpack(carReader: CarReader, roots?: CID[]): AsyncIterable<UnixFSEntry>; -export declare function unpackStream(readable: ReadableStream | AsyncIterable<Uint8Array>, { roots, blockstore: userBlockstore }?: { - roots?: CID[]; - blockstore?: Blockstore; -}): AsyncIterable<UnixFSEntry>; diff --git a/dist/types/unpack/utils/verifying-get-only-blockstore.d.ts deleted file mode 100644 index 05b0f5e..0000000 --- a/dist/types/unpack/utils/verifying-get-only-blockstore.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { CID } from 'multiformats'; -import { CarReader } from '@ipld/car/api'; -import { BaseBlockstore } from 'blockstore-core'; -import { Blockstore } from '../../blockstore/index'; -declare type verifyingBlockStore = { - get: (cid: CID) => Promise<Uint8Array | undefined>; -}; -export declare class VerifyingGetOnlyBlockStore extends BaseBlockstore { - store: verifyingBlockStore; - constructor(blockstore: verifyingBlockStore); - get(cid: CID): Promise<Uint8Array>; - static fromBlockstore(b: Blockstore): VerifyingGetOnlyBlockStore; - static fromCarReader(cr: CarReader): VerifyingGetOnlyBlockStore; -} -export {}; diff --git a/src/pack/index.ts index 6d83fc5..c696cf2 100644 --- a/src/pack/index.ts +++ b/src/pack/index.ts @@ -11,7 +11,6 @@ export type { ImportCandidateStream } import { Blockstore } from '../blockstore/index' import { MemoryBlockStore } from '../blockstore/memory' import { unixfsImporterOptionsDefault } from './constants' -import {CIDVersion} from "multiformats/types/src/cid"; export interface PackProperties { input: ImportCandidateStream | ImportCandidate, @@ -24,10 +23,9 @@ export interface PackProperties { * Use raw codec for leaf nodes. Default: true.
*/ rawLeaves?: boolean - cidVersion?: CIDVersion | undefined } -export async function pack ({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }: PackProperties) { +export async function pack ({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }: PackProperties) { if (!input || (Array.isArray(input) && !input.length)) { throw new Error('missing input file(s)') } @@ -43,8 +41,7 @@ export async function pack ({ input, blockstore: userBlockstore, hasher, maxChun maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize, maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode, wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory, - rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves, - cidVersion: cidVersion || unixfsImporterOptionsDefault.cidVersion + rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves }) )) diff --git a/src/pack/stream.ts b/src/pack/stream.ts index d025860..8306bb1 100644 --- a/src/pack/stream.ts +++ b/src/pack/stream.ts @@ -21,7 +21,7 @@ export interface PackToStreamProperties extends PackProperties { } // Node version of toCar with Node Stream Writable -export async function packToStream ({ input, writable, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }: PackToStreamProperties) { +export async function packToStream ({ input, writable, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }: PackToStreamProperties) { if (!input || (Array.isArray(input) && !input.length)) { throw new Error('given input could not be parsed correctly') } @@ -39,8 +39,7 @@ export async function packToStream ({ input, writable, blockstore: userBlockstor maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize, maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode, wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory, - rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves, - cidVersion: cidVersion || unixfsImporterOptionsDefault.cidVersion + rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves }) )) From 1dc15051bf4ac0895efcd5b58430875f4a19a70a Mon Sep 17 00:00:00 2001 From: jtsmedley <38006759+jtsmedley@users.noreply.github.com> Date: Tue, 2 Aug 2022 13:15:47 -0500 Subject: [PATCH 3/3] Adds support for setting the CID version (#2) * Adds support for setting the CID version --- src/pack/index.ts | 7 +++++-- src/pack/stream.ts | 5 +++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/pack/index.ts b/src/pack/index.ts index c696cf2..8c6112e 100644 --- a/src/pack/index.ts +++ b/src/pack/index.ts @@ -11,6 +11,7 @@ export type { ImportCandidateStream } import { Blockstore } from '../blockstore/index' import { MemoryBlockStore } from '../blockstore/memory' import { unixfsImporterOptionsDefault } from './constants' +import { CIDVersion } from "multiformats/types/src/cid"; export interface PackProperties { input: ImportCandidateStream | ImportCandidate, @@ -23,9 +24,10 @@ export interface PackProperties { * Use raw codec for leaf nodes. Default: true. 
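+ * Note: CIDv0 supports only dag-pb content, so callers passing cidVersion: 0 will usually also want rawLeaves: false.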
*/ rawLeaves?: boolean + cidVersion?: CIDVersion | undefined } -export async function pack ({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }: PackProperties) { +export async function pack ({ input, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }: PackProperties) { if (!input || (Array.isArray(input) && !input.length)) { throw new Error('missing input file(s)') } @@ -41,7 +43,8 @@ export async function pack ({ input, blockstore: userBlockstore, hasher, maxChun maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize, maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode, wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory, - rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves + rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves, + cidVersion: typeof cidVersion === "number" ? cidVersion : unixfsImporterOptionsDefault.cidVersion }) )) diff --git a/src/pack/stream.ts b/src/pack/stream.ts index 8306bb1..b78efa9 100644 --- a/src/pack/stream.ts +++ b/src/pack/stream.ts @@ -21,7 +21,7 @@ export interface PackToStreamProperties extends PackProperties { } // Node version of toCar with Node Stream Writable -export async function packToStream ({ input, writable, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }: PackToStreamProperties) { +export async function packToStream ({ input, writable, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves, cidVersion }: PackToStreamProperties) { if (!input || (Array.isArray(input) && !input.length)) { throw new Error('given input could not be parsed correctly') } @@ -39,7 +39,8 @@ export async function packToStream ({ input, writable, blockstore: userBlockstor maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize, maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode, wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory, - rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves + rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves, + cidVersion: typeof cidVersion === "number" ? cidVersion : unixfsImporterOptionsDefault.cidVersion }) ))
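
For consumers of the published package, the new option is exercised as below — a minimal sketch, assuming ipfs-car's usual subpath exports ('ipfs-car/pack' and 'ipfs-car/blockstore/memory'); the function name and input bytes are illustrative, not part of this patch:

import { pack } from 'ipfs-car/pack'
import { MemoryBlockStore } from 'ipfs-car/blockstore/memory'

async function packWithCidV0 () {
  // cidVersion: 0 asks the importer for CIDv0 (Qm...) identifiers.
  // CIDv0 only encodes dag-pb + sha2-256, so raw leaves and directory
  // wrapping are turned off here too -- an assumption about a sensible
  // pairing, not something the patch itself enforces.
  const { root, out } = await pack({
    input: new Uint8Array([0x68, 0x69]), // illustrative file content ("hi")
    blockstore: new MemoryBlockStore(),
    cidVersion: 0,
    rawLeaves: false,
    wrapWithDirectory: false
  })

  // The CAR is produced lazily: iterate `out` to drive the writer.
  const carChunks: Uint8Array[] = []
  for await (const chunk of out) {
    carChunks.push(chunk)
  }

  console.log(root.toString()) // expected to be a v0 (Qm...) root under these options
}

packWithCidV0().catch(console.error)

Omitting cidVersion (or passing 1) falls back to the CIDv1 default from unixfsImporterOptionsDefault, so existing callers see no change in their root CIDs.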