diff --git a/src/blockstore/fs.ts b/src/blockstore/fs.ts index ed9106e..43e525a 100644 --- a/src/blockstore/fs.ts +++ b/src/blockstore/fs.ts @@ -1,18 +1,18 @@ import fs from 'fs' -import os from 'os' import { CID } from 'multiformats' import { BaseBlockstore } from 'blockstore-core' import { Blockstore } from './index' +import * as os from "os"; export class FsBlockStore extends BaseBlockstore implements Blockstore { path: string _opened: boolean _opening?: Promise - constructor () { + constructor (path?: string) { super() - this.path = `${os.tmpdir()}/${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}` + this.path = path ? path : `${os.tmpdir()}/${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}` this._opened = false } diff --git a/src/pack/fs.ts b/src/pack/fs.ts index 6dcf51c..fed3990 100644 --- a/src/pack/fs.ts +++ b/src/pack/fs.ts @@ -1,5 +1,4 @@ import fs from 'fs' -import os from 'os' import path from 'path' import moveFile from 'move-file' @@ -13,8 +12,10 @@ export interface PackToFsProperties extends PackProperties { } export async function packToFs ({ input, output, blockstore: userBlockstore, hasher, maxChunkSize, maxChildrenPerNode, wrapWithDirectory, rawLeaves }: PackToFsProperties) { - const blockstore = userBlockstore ? userBlockstore : new FsBlockStore() - const location = output || `${os.tmpdir()}/${(parseInt(String(Math.random() * 1e9), 10)).toString() + Date.now()}` + const realpath = path.basename(await fs.promises.realpath(input as string)) + const inputBasename = realpath === "/" ? "file" : realpath + const blockstore = userBlockstore ? 
userBlockstore : new FsBlockStore(`/tmp/${inputBasename}.tmp.${process.pid}`) + const location = output || `${process.cwd()}/.${inputBasename}.car.tmp.${process.pid}` const writable = fs.createWriteStream(location) const { root } = await packToStream({ @@ -35,7 +36,7 @@ export async function packToFs ({ input, output, blockstore: userBlockstore, has // Move to work dir if (!output) { const basename = typeof input === 'string' ? path.parse(path.basename(input)).name : root.toString() - const filename = `${basename}.car` + const filename = basename === "/" ? "file.car" : `${basename}.car` await moveFile(location, `${process.cwd()}/${filename}`) return {root, filename} diff --git a/src/pack/stream.ts b/src/pack/stream.ts index 8306bb1..d3c559c 100644 --- a/src/pack/stream.ts +++ b/src/pack/stream.ts @@ -14,6 +14,8 @@ import { MemoryBlockStore } from '../blockstore/memory' import { unixfsImporterOptionsDefault } from './constants' import type { PackProperties } from './index' +import {Blockstore} from "../blockstore"; +import {FsBlockStore} from "../blockstore/fs"; export interface PackToStreamProperties extends PackProperties { input: string | Iterable | AsyncIterable, @@ -27,21 +29,32 @@ export async function packToStream ({ input, writable, blockstore: userBlockstor } input = typeof input === 'string' ? [input] : input - const blockstore = userBlockstore ?
userBlockstore : new MemoryBlockStore() + if (userBlockstore) { + process.on("SIGINT", async (signal) => await handleSignal(signal, userBlockstore)); + process.on("SIGTERM", async (signal) => await handleSignal(signal, userBlockstore)); + } - // Consume the source - const rootEntry = await last(pipe( - legacyGlobSource(input), - source => normaliseInput(source), - (source: any) => importer(source, blockstore, { - ...unixfsImporterOptionsDefault, - hasher: hasher || unixfsImporterOptionsDefault.hasher, - maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize, - maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode, - wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory, - rawLeaves: rawLeaves == null ? unixfsImporterOptionsDefault.rawLeaves : rawLeaves - }) - )) + const blockstore = userBlockstore ? userBlockstore : new MemoryBlockStore() + let rootEntry + + try { + // Consume the source + rootEntry = await last(pipe( + legacyGlobSource(input), + source => normaliseInput(source), + (source: any) => importer(source, blockstore, { + ...unixfsImporterOptionsDefault, + hasher: hasher || unixfsImporterOptionsDefault.hasher, + maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize, + maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode, + wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory, + rawLeaves: rawLeaves == null ? 
unixfsImporterOptionsDefault.rawLeaves : rawLeaves + }) + )) + } catch (err) { + // tslint:disable-next-line:no-console + console.log("Error while importing") + } if (!rootEntry || !rootEntry.cid) { throw new Error('given input could not be parsed correctly') @@ -52,8 +65,13 @@ export async function packToStream ({ input, writable, blockstore: userBlockstor const { writer, out } = await CarWriter.create([root]) Readable.from(out).pipe(writable) - for await (const block of blockstore.blocks()) { - await writer.put(block) + try { + for await (const block of blockstore.blocks()) { + await writer.put(block) + } + } catch (err) { + // tslint:disable-next-line:no-console + console.log("Error while writing blocks") } await writer.close() @@ -86,3 +104,10 @@ async function * legacyGlobSource (input: Iterable | AsyncIterable, roots?: CID[], output?: string, blockstore?: Blockstore}) { - const blockstore = userBlockstore ? userBlockstore : new FsBlockStore() + const blockstore = userBlockstore ? userBlockstore : new FsBlockStore(output ? output : (roots ? roots[0].toString() : "output")) await writeFiles(unpackStream(input, { roots, blockstore }), output) if (!userBlockstore) { await blockstore.close()