Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 16 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ import { MemoryBlockStore } from 'ipfs-car/blockstore/memory' // You can also us
const { root, out } = await pack({
input: [new Uint8Array([21, 31, 41])],
blockstore: new MemoryBlockStore(),
wrapWithDirectory: true // Wraps input into a directory. Defaults to `true`
wrapWithDirectory: false, // Wraps inputs with paths into a directory. Defaults to `false`
maxChunkSize: 262144 // The maximum block size in bytes. Defaults to `262144`. Max safe value is < 1048576 (1MiB)
})

Expand All @@ -117,6 +117,21 @@ for await (const part of out) {
}
```

When using the `wrapWithDirectory` option, a path must be provided for each file so that the DAG links are created properly. See the following example:

```js
import { pack } from 'ipfs-car/pack'
import { MemoryBlockStore } from 'ipfs-car/blockstore/memory' // You can also use the `level-blockstore` module

const { root, out } = await pack({
  input: [{
    path: 'file.txt',
    content: new Uint8Array([21, 31, 41])
  }],
  blockstore: new MemoryBlockStore(),
  wrapWithDirectory: true
})
```

### `ipfs-car/pack/blob`

Takes an [ImportCandidateStream](https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs-core-types/src/utils.d.ts#L27) and writes it to a [Blob](https://github.com/web-std/io/tree/main/blob).
Expand Down
1,562 changes: 686 additions & 876 deletions package-lock.json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion src/pack/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,6 @@ export const unixfsImporterOptionsDefault = {
maxChunkSize: 262144,
hasher: sha256,
rawLeaves: true,
wrapWithDirectory: true,
wrapWithDirectory: false,
maxChildrenPerNode: 174
} as UserImporterOptions
17 changes: 16 additions & 1 deletion src/pack/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,21 @@ export async function pack ({ input, blockstore: userBlockstore, hasher, maxChun
throw new Error('missing input file(s)')
}

// Transform Web File to Import candidate
if (Array.isArray(input) && input.filter((i) => i.name).length) {
input = input.map((file) => {
if (file.name) {
file.path = file.name
}
return file
})
}

// Byte-array inputs carry no path: each input must either specify a path or the wrapWithDirectory option must be disabled
if (Array.isArray(input) && input.filter((i) => !i.path).length && wrapWithDirectory !== false) {
throw new Error('inputs with no path provided need to have a path specified or wrapWithDirectory option must be disabled')
}

const blockstore = userBlockstore ? userBlockstore : new MemoryBlockStore()

// Consume the source
Expand All @@ -38,7 +53,7 @@ export async function pack ({ input, blockstore: userBlockstore, hasher, maxChun
hasher: hasher || unixfsImporterOptionsDefault.hasher,
maxChunkSize: maxChunkSize || unixfsImporterOptionsDefault.maxChunkSize,
maxChildrenPerNode: maxChildrenPerNode || unixfsImporterOptionsDefault.maxChildrenPerNode,
wrapWithDirectory: wrapWithDirectory === false ? false : unixfsImporterOptionsDefault.wrapWithDirectory
wrapWithDirectory: wrapWithDirectory === true ? true : unixfsImporterOptionsDefault.wrapWithDirectory
})
))

Expand Down
99 changes: 91 additions & 8 deletions test/pack/index.browser.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,12 @@ import { MemoryBlockStore } from '../../src/blockstore/memory'
describe('pack', () => {
[MemoryBlockStore].map((Blockstore) => {
describe(`with ${Blockstore.name}`, () => {
it('with iterable input', async () => {
it('with iterable input of one file', async () => {
const { root, out } = await pack({
input: [new Uint8Array([21, 31])],
input: [{
path: 'a.txt',
content: new Uint8Array([21, 31])
}],
blockstore: new Blockstore()
})

Expand All @@ -20,13 +23,84 @@ describe('pack', () => {
carParts.push(part)
}

expect(root.toString()).to.eql('bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354')
expect(root.toString()).to.eql('bafkreifidl2jnal7ycittjrnbki6jasdxwwvpf7fj733vnyhidtusxby4y')
expect(carParts.length).to.eql(4)
})

it('with iterable input and wrapping with directory', async () => {
const { root, out } = await pack({
input: [{
path: 'test.txt',
content: new Uint8Array([21, 31])
}],
blockstore: new Blockstore(),
wrapWithDirectory: true
})

const carParts = []
for await (const part of out) {
carParts.push(part)
}

expect(root.toString()).to.eql('bafybeifo5opnp65qnlowjrmrwsc6xs7tfzlehy4hsraugkaf7afzj2qkvu')
expect(carParts.length).to.eql(7)
})

it('with iterable input of many files', async () => {
const { root, out } = await pack({
input: [
{
path: 'a.txt',
content: new Uint8Array([21, 31])
},
{
path: 'b.txt',
content: new Uint8Array([22, 32])
}
],
blockstore: new Blockstore()
})

const carParts = []
for await (const part of out) {
carParts.push(part)
}

expect(root.toString()).to.eql('bafkreihdoh3xvolzxa4aa3snjjnhkgigs4rbbsj2qdax5kfbtlfewdmx5q')
expect(carParts.length).to.eql(7)
})

it('with iterable input of many files and wrapping with directory', async () => {
const { root, out } = await pack({
input: [
{
path: 'a.txt',
content: new Uint8Array([21, 31])
},
{
path: 'b.txt',
content: new Uint8Array([22, 32])
}
],
blockstore: new Blockstore(),
wrapWithDirectory: true
})

const carParts = []
for await (const part of out) {
carParts.push(part)
}

expect(root.toString()).to.eql('bafybeifvinv2j25rn2dndgrpgeo5bfrknvjpqowoxw7msr4iyub6izyr5a')
expect(carParts.length).to.eql(10)
})

it('can pack with custom unixfs importer options', async () => {
const { root, out } = await pack({
input: [new Uint8Array([21, 31])],
input: [{
path: 'a.txt',
content: new Uint8Array([21, 31])
}],
blockstore: new Blockstore(),
hasher: sha512,
maxChunkSize: 1048576,
Expand All @@ -45,19 +119,25 @@ describe('pack', () => {

it('returns a car blob', async () => {
const { root, car } = await packToBlob({
input: [new Uint8Array([21, 31])],
input: [{
path: 'a.txt',
content: new Uint8Array([21, 31])
}],
blockstore: new Blockstore()
})

expect(root.toString()).to.eql('bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354')
expect(root.toString()).to.eql('bafkreifidl2jnal7ycittjrnbki6jasdxwwvpf7fj733vnyhidtusxby4y')
})

it('pack does not close provided blockstore', async () => {
const blockstore = new Blockstore()
const spy = sinon.spy(blockstore, 'close')

await pack({
input: [new Uint8Array([21, 31])],
input: [{
path: 'a.txt',
content: new Uint8Array([21, 31])
}],
blockstore
})

Expand All @@ -70,7 +150,10 @@ describe('pack', () => {
const spy = sinon.spy(blockstore, 'close')

await packToBlob({
input: [new Uint8Array([21, 31])],
input: [{
path: 'a.txt',
content: new Uint8Array([21, 31])
}],
blockstore
})

Expand Down
32 changes: 25 additions & 7 deletions test/pack/index.node.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ describe('pack', () => {
const carReader = await CarReader.fromIterable(inStream)
const files = await all(unpack(carReader))

expect(files).to.have.lengthOf(3)
expect(files).to.have.lengthOf(2)
})

it('pack dir to car with filesystem output', async () => {
Expand All @@ -79,7 +79,7 @@ describe('pack', () => {
const carReader = await CarReader.fromIterable(inStream)
const files = await all(unpack(carReader))

expect(files).to.have.lengthOf(3)
expect(files).to.have.lengthOf(2)
})

it('pack raw file to car with filesystem output', async () => {
Expand All @@ -97,7 +97,7 @@ describe('pack', () => {
const carReader = await CarReader.fromIterable(inStream)
const files = await all(unpack(carReader))

expect(files).to.have.lengthOf(2)
expect(files).to.have.lengthOf(1)

const rawOriginalContent = new Uint8Array(fs.readFileSync(`${__dirname}/../fixtures/file.raw`))
const rawContent = (await all(files[files.length - 1].content()))[0]
Expand All @@ -121,7 +121,7 @@ describe('pack', () => {
const carReader = await CarReader.fromIterable(inStream)
const files = await all(unpack(carReader))

expect(files).to.have.lengthOf(2)
expect(files).to.have.lengthOf(1)

const rawOriginalContent = new Uint8Array(fs.readFileSync(`${__dirname}/../fixtures/file.raw`))
const rawContent = (await all(files[files.length - 1].content()))[0]
Expand All @@ -148,7 +148,7 @@ describe('pack', () => {
const carReader = await CarReader.fromIterable(inStream)
const files = await all(unpack(carReader))

expect(files).to.have.lengthOf(2)
expect(files).to.have.lengthOf(1)

const rawOriginalContent = new Uint8Array(fs.readFileSync(`${__dirname}/../fixtures/file.raw`))
const rawContent = (await all(files[files.length - 1].content()))[0]
Expand Down Expand Up @@ -192,12 +192,15 @@ describe('pack', () => {
const blockstore = new Blockstore()

const { car, root } = await packToBlob({
input: [new Uint8Array([21, 31])],
input: [{
path: 'file.txt',
content: new Uint8Array([21, 31])
}],
blockstore
})

expect(car).to.exist
expect(root.toString()).to.eql('bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354')
expect(root.toString()).to.eql('bafkreifidl2jnal7ycittjrnbki6jasdxwwvpf7fj733vnyhidtusxby4y')
await blockstore.close()
})

Expand Down Expand Up @@ -229,6 +232,21 @@ describe('pack', () => {
}
throw new Error('pack should throw error with empty input')
})

it('should error to pack byte array input with wrapWithDirectory enabled and no path', async () => {
const blockstore = new Blockstore()

try {
await pack({
input: [new Uint8Array([1, 2, 3])],
blockstore
})
} catch (err) {
expect(err).to.exist
return
}
throw new Error('pack should throw error when byte array is received as content and no path is provided')
})
})
})
})
47 changes: 35 additions & 12 deletions test/unpack/index.browser.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,21 +12,30 @@ import { IdbBlockStore } from '../../src/blockstore/idb'
describe('unpack', () => {
it('with CarReader input', async () => {
const { out } = await pack({
input: [new Uint8Array([21, 31])],
blockstore: new MemoryBlockStore()
input: [{
path: 'a.txt',
content: new Uint8Array([21, 31])
}],
blockstore: new MemoryBlockStore(),
wrapWithDirectory: true
})

let bytes = new Uint8Array([])
let carParts = []
for await (const part of out) {
carParts.push(part)
bytes = concat([bytes, new Uint8Array(part)])
}

const carReader = await CarReader.fromBytes(bytes)
const files = await all(unpack(carReader))

expect(files.length).to.eql(1)
expect(files.length).to.eql(2)
expect(files[0].type).to.eql('directory')
expect(files[0].name).to.eql('bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354')
expect(files[0].name).to.eql('bafybeiglo54z2343qksf253l2xtsik3n4kdguwtfayhhtn36btqrnlwrsu')
expect(files[1].type).to.eql('raw')
expect(files[1].name).to.eql('a.txt')
expect(files[1].path).to.eql('bafybeiglo54z2343qksf253l2xtsik3n4kdguwtfayhhtn36btqrnlwrsu/a.txt')
})
})

Expand All @@ -35,18 +44,29 @@ describe('unpackStream', () => {
describe(`with ${Blockstore.name}`, () => {
it('with iterable input', async () => {
const { out } = await pack({
input: [new Uint8Array([21, 31])],
blockstore: new MemoryBlockStore()
input: [{
path: 'a.txt',
content: new Uint8Array([21, 31])
}],
blockstore: new MemoryBlockStore(),
wrapWithDirectory: true
})
const files = await all(unpackStream(out, {blockstore: new Blockstore()}))
expect(files.length).to.eql(1)
expect(files.length).to.eql(2)
expect(files[0].type).to.eql('directory')
expect(files[0].name).to.eql('bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354')
expect(files[0].name).to.eql('bafybeiglo54z2343qksf253l2xtsik3n4kdguwtfayhhtn36btqrnlwrsu')
expect(files[1].type).to.eql('raw')
expect(files[1].name).to.eql('a.txt')
expect(files[1].path).to.eql('bafybeiglo54z2343qksf253l2xtsik3n4kdguwtfayhhtn36btqrnlwrsu/a.txt')
})
it('with readablestream input', async () => {
const { out } = await pack({
input: [new Uint8Array([21, 31])],
blockstore: new MemoryBlockStore()
input: [{
path: 'a.txt',
content: new Uint8Array([21, 31])
}],
blockstore: new MemoryBlockStore(),
wrapWithDirectory: true
})
const stream = new ReadableStream({
async pull(controller) {
Expand All @@ -57,9 +77,12 @@ describe('unpackStream', () => {
}
})
const files = await all(unpackStream(stream, {blockstore: new Blockstore()}))
expect(files.length).to.eql(1)
expect(files.length).to.eql(2)
expect(files[0].type).to.eql('directory')
expect(files[0].name).to.eql('bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354')
expect(files[0].name).to.eql('bafybeiglo54z2343qksf253l2xtsik3n4kdguwtfayhhtn36btqrnlwrsu')
expect(files[1].type).to.eql('raw')
expect(files[1].name).to.eql('a.txt')
expect(files[1].path).to.eql('bafybeiglo54z2343qksf253l2xtsik3n4kdguwtfayhhtn36btqrnlwrsu/a.txt')
})
})
})
Expand Down