From 7493c27aae443063be360aeba3d8699852948b1e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Uhl=C3=AD=C5=99?= Date: Wed, 22 Sep 2021 11:26:20 +0200 Subject: [PATCH] feat: streaming directory using multipart --- package-lock.json | 21 +++--------- package.json | 1 + src/bee.ts | 10 +++--- src/modules/bzz.ts | 12 +++---- src/types/index.ts | 9 +++-- src/utils/collection.ts | 42 ++++++----------------- src/utils/data.browser.ts | 54 +++++++++++++----------------- src/utils/data.ts | 45 +++++++++++++++++++------ src/utils/file.ts | 40 ---------------------- src/utils/stream.ts | 35 +++++++++++++++++++ src/utils/tar.ts | 30 ----------------- src/utils/type.ts | 41 ++++++++++++++++++++++- test/integration/bee-class.spec.ts | 37 ++++++++++++-------- 13 files changed, 190 insertions(+), 187 deletions(-) delete mode 100644 src/utils/file.ts delete mode 100644 src/utils/tar.ts diff --git a/package-lock.json b/package-lock.json index e2b8153a..f4a6ccb3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,6 +11,7 @@ "dependencies": { "cross-blob": "^2.0.1", "elliptic": "^6.5.4", + "form-data": "^3.0.1", "isomorphic-ws": "^4.0.1", "js-sha3": "^0.8.0", "ky": "^0.25.1", @@ -4851,8 +4852,7 @@ "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "dev": true + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" }, "node_modules/at-least-node": { "version": "1.0.0", @@ -5507,7 +5507,6 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, "dependencies": { "delayed-stream": "~1.0.0" }, @@ -5864,7 +5863,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", - "dev": true, "engines": { "node": ">=0.4.0" } @@ -7269,7 +7267,6 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", - "dev": true, "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -12627,7 +12624,6 @@ "version": "1.47.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.47.0.tgz", "integrity": "sha512-QBmA/G2y+IfeS4oktet3qRZ+P5kPhCKRXxXnQEudYqUaEioAU1/Lq2us3D/t1Jfo4hE9REQPrbB7K5sOczJVIw==", - "dev": true, "engines": { "node": ">= 0.6" } @@ -12636,7 +12632,6 @@ "version": "2.1.30", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.30.tgz", "integrity": "sha512-crmjA4bLtR8m9qLpHvgxSChT+XoSlZi8J4n/aIdn3z92e/U47Z0V/yl+Wh9W046GgFVAmoNR/fmdbZYcSSIUeg==", - "dev": true, "dependencies": { "mime-db": "1.47.0" }, @@ -19613,8 +19608,7 @@ "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "dev": true + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" }, "at-least-node": { "version": "1.0.0", @@ -20112,7 +20106,6 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, "requires": { "delayed-stream": "~1.0.0" } @@ -20393,8 +20386,7 @@ "delayed-stream": { "version": "1.0.0", 
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", - "dev": true + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" }, "depcheck": { "version": "1.4.0", @@ -21427,7 +21419,6 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", - "dev": true, "requires": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -25525,14 +25516,12 @@ "mime-db": { "version": "1.47.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.47.0.tgz", - "integrity": "sha512-QBmA/G2y+IfeS4oktet3qRZ+P5kPhCKRXxXnQEudYqUaEioAU1/Lq2us3D/t1Jfo4hE9REQPrbB7K5sOczJVIw==", - "dev": true + "integrity": "sha512-QBmA/G2y+IfeS4oktet3qRZ+P5kPhCKRXxXnQEudYqUaEioAU1/Lq2us3D/t1Jfo4hE9REQPrbB7K5sOczJVIw==" }, "mime-types": { "version": "2.1.30", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.30.tgz", "integrity": "sha512-crmjA4bLtR8m9qLpHvgxSChT+XoSlZi8J4n/aIdn3z92e/U47Z0V/yl+Wh9W046GgFVAmoNR/fmdbZYcSSIUeg==", - "dev": true, "requires": { "mime-db": "1.47.0" } diff --git a/package.json b/package.json index a640a953..6f72fd73 100644 --- a/package.json +++ b/package.json @@ -50,6 +50,7 @@ "dependencies": { "cross-blob": "^2.0.1", "elliptic": "^6.5.4", + "form-data": "^3.0.1", "isomorphic-ws": "^4.0.1", "js-sha3": "^0.8.0", "ky": "^0.25.1", diff --git a/src/bee.ts b/src/bee.ts index 9432b438..b7fcce22 100644 --- a/src/bee.ts +++ b/src/bee.ts @@ -9,7 +9,6 @@ import * as stamps from './modules/stamps' import { BeeArgumentError, BeeError } from './utils/error' import { prepareWebsocketData } from './utils/data' -import { fileArrayBuffer, isFile } from './utils/file' import { makeFeedReader, makeFeedWriter } from './feed' import { makeSigner } from './chunk/signer' import { assertFeedType, DEFAULT_FEED_TYPE, FeedType } from './feed/type' @@ -33,10 +32,12 @@ import { assertPublicKey, assertReference, assertUploadOptions, + assertCollection, + isFile, makeTagUid, } from './utils/type' import { setJsonData, getJsonData } from './feed/json' -import { makeCollectionFromFS, makeCollectionFromFileList, assertCollection } from './utils/collection' +import { makeCollectionFromFS, makeCollectionFromFileList } from './utils/collection' import { AllTagsOptions, Collection, @@ -224,12 +225,11 @@ export class Bee { } if (isFile(data)) { - const fileData = await fileArrayBuffer(data) const fileName = name ?? 
data.name const contentType = data.type const fileOptions = { contentType, ...options } - return bzz.uploadFile(this.ky, fileData, postageBatchId, fileName, fileOptions) + return bzz.uploadFile(this.ky, data.stream(), postageBatchId, fileName, fileOptions) } else if (isReadable(data) && options?.tag && !options.size) { // TODO: Needed until https://github.com/ethersphere/bee/issues/2317 is resolved const result = await bzz.uploadFile(this.ky, data, postageBatchId, name, options) @@ -297,7 +297,7 @@ export class Bee { if (options) assertCollectionUploadOptions(options) - const data = await makeCollectionFromFileList(fileList) + const data = makeCollectionFromFileList(fileList) return bzz.uploadCollection(this.ky, data, postageBatchId, options) } diff --git a/src/modules/bzz.ts b/src/modules/bzz.ts index 1f8009da..733f56ba 100644 --- a/src/modules/bzz.ts +++ b/src/modules/bzz.ts @@ -13,12 +13,10 @@ import { } from '../types' import { extractUploadHeaders, readFileHeaders } from '../utils/headers' import { http } from '../utils/http' -import { prepareData } from '../utils/data' -import { makeTar } from '../utils/tar' -import { assertCollection } from '../utils/collection' +import { prepareCollection, prepareData } from '../utils/data' import { wrapBytesWithHelpers } from '../utils/bytes' import { isReadable } from '../utils/stream' -import { makeTagUid } from '../utils/type' +import { assertCollection, makeTagUid } from '../utils/type' const bzzEndpoint = 'bzz' @@ -153,20 +151,18 @@ function extractCollectionUploadHeaders( */ export async function uploadCollection( ky: Ky, - collection: Collection, + collection: Collection, postageBatchId: BatchId, options?: CollectionUploadOptions, ): Promise { assertCollection(collection) - const tarData = makeTar(collection) const response = await http<{ reference: Reference }>(ky, { method: 'post', path: bzzEndpoint, - body: tarData, + body: await prepareCollection(collection), responseType: 'json', headers: { - 'content-type': 'application/x-tar', 'swarm-collection': 'true', ...extractCollectionUploadHeaders(postageBatchId, options), }, diff --git a/src/types/index.ts b/src/types/index.ts index dd33f47a..20a8121b 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -57,7 +57,7 @@ export type Address = HexString /** * Type representing Readable stream that abstracts away implementation especially the difference between - * browser and NodeJS versions as both are supported. + * browser (WHATWG) and NodeJS versions as both are supported. */ export type Readable = NativeReadable | CompatibilityReadable | ReadableStream | ReadableStreamPonyfill @@ -272,9 +272,14 @@ export interface CollectionEntry { data: T /** - * + * Path in the directory structure */ path: string + + /** + * If data is Readable then length has to be specified as well! 
+ */ + length?: number } /** diff --git a/src/utils/collection.ts b/src/utils/collection.ts index 77a259c0..55dc07ab 100644 --- a/src/utils/collection.ts +++ b/src/utils/collection.ts @@ -1,31 +1,9 @@ import fs from 'fs' import path from 'path' -import { Collection } from '../types' -import { BeeArgumentError } from './error' -import { fileArrayBuffer } from './file' -import { isUint8Array } from './type' - -export function isCollection(data: unknown): data is Collection { - if (!Array.isArray(data)) { - return false - } - - return data.every(entry => typeof entry === 'object' && entry.data && entry.path && isUint8Array(entry.data)) -} +import { Collection, Readable } from '../types' +import { Readable as NodeReadable } from 'stream' -export function assertCollection(data: unknown): asserts data is Collection { - if (!isCollection(data)) { - throw new BeeArgumentError('invalid collection', data) - } -} - -/** - * Creates array in the format of Collection with data loaded from directory on filesystem. - * The function loads all the data into memory! - * - * @param dir path to the directory - */ -export async function makeCollectionFromFS(dir: string): Promise> { +export async function makeCollectionFromFS(dir: string): Promise> { if (typeof dir !== 'string') { throw new TypeError('dir has to be string!') } @@ -37,11 +15,11 @@ export async function makeCollectionFromFS(dir: string): Promise> { +async function buildCollectionRelative(dir: string, relativePath: string): Promise> { // Handles case when the dir is not existing or it is a file ==> throws an error const dirname = path.join(dir, relativePath) const entries = await fs.promises.opendir(dirname) - let collection: Collection = [] + let collection: Collection = [] for await (const entry of entries) { const fullPath = path.join(dir, relativePath, entry.name) @@ -50,7 +28,8 @@ async function buildCollectionRelative(dir: string, relativePath: string): Promi if (entry.isFile()) { collection.push({ path: entryPath, - data: new Uint8Array(await fs.promises.readFile(fullPath)), + data: fs.createReadStream(fullPath), + length: (await fs.promises.stat(fullPath)).size, }) } else if (entry.isDirectory()) { collection = [...(await buildCollectionRelative(dir, entryPath)), ...collection] @@ -83,8 +62,8 @@ function makeFilePath(file: WebkitFile) { throw new TypeError('file is not valid File object') } -export async function makeCollectionFromFileList(fileList: FileList | File[]): Promise> { - const collection: Collection = [] +export function makeCollectionFromFileList(fileList: FileList | File[]): Collection { + const collection: Collection = [] for (let i = 0; i < fileList.length; i++) { const file = fileList[i] as WebkitFile @@ -92,7 +71,8 @@ export async function makeCollectionFromFileList(fileList: FileList | File[]): P if (file) { collection.push({ path: makeFilePath(file), - data: new Uint8Array(await fileArrayBuffer(file)), + data: file.stream(), + length: file.size, }) } } diff --git a/src/utils/data.browser.ts b/src/utils/data.browser.ts index 2db429bf..3be5f8a4 100644 --- a/src/utils/data.browser.ts +++ b/src/utils/data.browser.ts @@ -1,6 +1,8 @@ -import { isNodeReadable, isReadableStream } from './stream' -import { Readable } from '../types' +import { bufferReadable, isReadable } from './stream' +import { Collection, Readable } from '../types' import Blob from 'cross-blob' +import { FormData } from 'formdata-node' +import { BeeError } from './error' /** * Validates input and converts to Uint8Array @@ -24,34 +26,8 @@ export async function 
prepareData( // there are already first experiments on this field (Chromium) // but till it is fully implemented across browsers-land we have to // buffer the data before sending the requests. - if (isNodeReadable(data)) { - return new Promise(resolve => { - const buffers: Array = [] - data.on('data', d => { - buffers.push(d) - }) - data.on('end', () => { - resolve(new Blob(buffers, { type: 'application/octet-stream' })) - }) - }) - } - - if (isReadableStream(data)) { - return new Promise(async resolve => { - const reader = data.getReader() - const buffers: Array = [] - - let done, value - do { - ;({ done, value } = await reader.read()) - - if (!done) { - buffers.push(value) - } - } while (!done) - - resolve(new Blob(buffers, { type: 'application/octet-stream' })) - }) + if (isReadable(data)) { + return bufferReadable(data) } throw new TypeError('unknown data type') @@ -66,3 +42,21 @@ export async function prepareWebsocketData(data: string | ArrayBuffer | Blob): P throw new TypeError('unknown websocket data type') } + +export async function prepareCollection(data: Collection): Promise { + const form = new FormData() + + for (const el of data) { + if (el.data instanceof Uint8Array) { + form.set(el.path, el.data) + } else if (isReadable(el.data)) { + if (!el.length) { + throw new BeeError(`Collection entry '${el.path}' is a stream, but does not have required length!`) + } + + form.set(el.path, await bufferReadable(el.data)) + } + } + + return form +} diff --git a/src/utils/data.ts b/src/utils/data.ts index 1efd7da6..95f1b6fa 100644 --- a/src/utils/data.ts +++ b/src/utils/data.ts @@ -1,16 +1,12 @@ import type { Data } from 'ws' import Blob from 'cross-blob' +import FormData from 'form-data' +import { Buffer } from 'buffer' + import { isNodeReadable, isReadableStream, readableWebToNode } from './stream' -import { Readable } from '../types' - -/** - * Prepare data for valid input for node-fetch. - * - * node-fetch is not using WHATWG ReadableStream but NodeJS Readable so we need to convert in case of ReadableStream, - * but the typings are set to use ReadableStream so hence why type conversion here. 
- * - * @param data any string, ArrayBuffer, Uint8Array or Readable - */ +import { Collection, Readable } from '../types' +import { isUint8Array } from './type' + export async function prepareData( data: string | ArrayBuffer | Uint8Array | Readable, ): Promise | never> { @@ -46,3 +42,32 @@ export async function prepareWebsocketData(data: Data | Blob): Promise): Promise { + const form = new FormData() + + for (const el of data) { + let resolvedData, length + + if (isReadableStream(el.data)) { + length = el.length + resolvedData = readableWebToNode(el.data) + } else if (isUint8Array(el.data)) { + resolvedData = Buffer.from(el.data) + length = resolvedData.length + } else { + resolvedData = el.data + length = el.length + } + + form.append(el.path, resolvedData, { + filepath: el.path, + knownLength: length, + header: { + 'Content-Length': length, + }, + }) + } + + return form +} diff --git a/src/utils/file.ts b/src/utils/file.ts deleted file mode 100644 index 1530b5e8..00000000 --- a/src/utils/file.ts +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Compatibility functions for working with File API objects - * - * https://developer.mozilla.org/en-US/docs/Web/API/File - */ - -export function isFile(file: unknown): file is File { - // browser - if (typeof File === 'function') { - return file instanceof File - } - - // node.js - const f = file as File - - return ( - typeof f === 'object' && - typeof f.name === 'string' && - (typeof f.stream === 'function' || typeof f.arrayBuffer === 'function') - ) -} - -/** - * Compatibility helper for browsers where the `arrayBuffer function is - * missing from `File` objects. - * - * @param file A File object - */ -export async function fileArrayBuffer(file: File): Promise { - if (file.arrayBuffer) { - return file.arrayBuffer() - } - - // workaround for Safari where arrayBuffer is not supported on Files - return new Promise(resolve => { - const fr = new FileReader() - fr.onload = () => resolve(fr.result as ArrayBuffer) - fr.readAsArrayBuffer(file) - }) -} diff --git a/src/utils/stream.ts b/src/utils/stream.ts index 9795ff92..ed74b473 100644 --- a/src/utils/stream.ts +++ b/src/utils/stream.ts @@ -4,6 +4,7 @@ import { ReadableStream } from 'web-streams-polyfill/ponyfill' import { Readable as NodeReadable, ReadableOptions as NodeReadableOptions } from 'readable-stream' import { Readable } from '../types' +import Blob from 'cross-blob' /** * Validates if passed object is either browser's ReadableStream @@ -144,3 +145,37 @@ export function normalizeToReadableStream(stream: Readable): ReadableStream { throw new TypeError('Passed stream is not Node Readable nor ReadableStream!') } + +export async function bufferReadable(stream: Readable): Promise { + if (isNodeReadable(stream)) { + return new Promise(resolve => { + const buffers: Array = [] + stream.on('data', d => { + buffers.push(d) + }) + stream.on('end', () => { + resolve(new Blob(buffers, { type: 'application/octet-stream' })) + }) + }) + } + + if (isReadableStream(stream)) { + return new Promise(async resolve => { + const reader = stream.getReader() + const buffers: Array = [] + + let done, value + do { + ;({ done, value } = await reader.read()) + + if (!done) { + buffers.push(value) + } + } while (!done) + + resolve(new Blob(buffers, { type: 'application/octet-stream' })) + }) + } + + throw new TypeError('Passed data is nor Node Readable nor WHATWG ReadableStream') +} diff --git a/src/utils/tar.ts b/src/utils/tar.ts deleted file mode 100644 index f7429256..00000000 --- a/src/utils/tar.ts +++ /dev/null @@ -1,30 +0,0 @@ 
-import { Collection } from '../types' -import Tar from 'tar-js' - -// this is a workaround type so that we are able to pass in Uint8Arrays -// as string to `tar.append` -interface StringLike { - readonly length: number - charCodeAt: (index: number) => number -} - -// converts a string to utf8 Uint8Array and returns it as a string-like -// object that `tar.append` accepts as path -function fixUnicodePath(path: string): StringLike { - const codes = new TextEncoder().encode(path) - - return { - length: codes.length, - charCodeAt: index => codes[index], - } -} - -export function makeTar(data: Collection): Uint8Array { - const tar = new Tar() - for (const entry of data) { - const path = fixUnicodePath(entry.path) - tar.append(path, entry.data) - } - - return tar.out -} diff --git a/src/utils/type.ts b/src/utils/type.ts index 8982267d..8609598f 100644 --- a/src/utils/type.ts +++ b/src/utils/type.ts @@ -21,9 +21,9 @@ import { PSS_TARGET_HEX_LENGTH_MAX, UploadOptions, TransactionHash, + Collection, } from '../types' import { BeeArgumentError } from './error' -import { isFile } from './file' import { assertHexString, assertPrefixedHexString } from './hex' import { isReadable } from './stream' @@ -73,6 +73,45 @@ export function assertNonNegativeInteger(value: unknown, name = 'Value'): assert if (Number(value) < 0) throw new BeeArgumentError(`${name} has to be bigger or equal to zero`, value) } +export function isCollection(data: unknown): data is Collection { + if (!Array.isArray(data)) { + return false + } + + return data.every( + entry => + typeof entry === 'object' && entry.data && entry.path && (isUint8Array(entry.data) || isReadable(entry.data)), + ) +} + +export function assertCollection(data: unknown): asserts data is Collection { + if (!isCollection(data)) { + throw new BeeArgumentError('invalid collection', data) + } +} + +/** + * Compatibility functions for working with File API objects + * + * https://developer.mozilla.org/en-US/docs/Web/API/File + */ + +export function isFile(file: unknown): file is File { + // browser + if (typeof File === 'function') { + return file instanceof File + } + + // node.js + const f = file as File + + return ( + typeof f === 'object' && + typeof f.name === 'string' && + (typeof f.stream === 'function' || typeof f.arrayBuffer === 'function') + ) +} + export function assertReference(value: unknown): asserts value is Reference { try { assertHexString(value, REFERENCE_HEX_LENGTH) diff --git a/test/integration/bee-class.spec.ts b/test/integration/bee-class.spec.ts index bd2d866d..36246e68 100644 --- a/test/integration/bee-class.spec.ts +++ b/test/integration/bee-class.spec.ts @@ -79,11 +79,11 @@ describe('Bee class', () => { }) it('should work with file object', async () => { - const content = new Uint8Array([1, 2, 3]) + const content = 'hello world' const name = 'hello.txt' const type = 'text/plain' const file = { - arrayBuffer: () => content, + stream: () => Readable.from(content), name, type, } as unknown as File @@ -91,16 +91,16 @@ describe('Bee class', () => { const result = await bee.uploadFile(getPostageBatch(), file) const downloadedFile = await bee.downloadFile(result.reference) - expect(downloadedFile.data).toEqual(content) + expect(downloadedFile.data.text()).toEqual(content) expect(downloadedFile.name).toEqual(name) expect(downloadedFile.contentType).toEqual(type) }) it('should work with file object and name overridden', async () => { - const content = new Uint8Array([1, 2, 3]) + const content = 'hello world' const name = 'hello.txt' const file = { - 
arrayBuffer: () => content, + stream: () => Readable.from(content), name, } as unknown as File const nameOverride = 'hello-override.txt' @@ -108,14 +108,14 @@ describe('Bee class', () => { const result = await bee.uploadFile(getPostageBatch(), file, nameOverride) const downloadedFile = await bee.downloadFile(result.reference) - expect(downloadedFile.data).toEqual(content) + expect(downloadedFile.data.text()).toEqual(content) expect(downloadedFile.name).toEqual(nameOverride) }) it('should work with file object and content-type overridden', async () => { - const content = new Uint8Array([1, 2, 3]) + const content = 'hello world' const file = { - arrayBuffer: () => content, + stream: () => Readable.from(content), name: 'hello.txt', type: 'text/plain', } as unknown as File @@ -124,7 +124,7 @@ describe('Bee class', () => { const result = await bee.uploadFile(getPostageBatch(), file, undefined, { contentType: contentTypeOverride }) const downloadedFile = await bee.downloadFile(result.reference) - expect(downloadedFile.data).toEqual(content) + expect(downloadedFile.data.text()).toEqual(content) expect(downloadedFile.contentType).toEqual(contentTypeOverride) }) @@ -182,18 +182,27 @@ describe('Bee class', () => { }) it('should upload collection', async () => { - const directoryStructure: Collection = [ + const directoryStructure: Collection = [ { - path: '0', + path: 'folder/file', data: new TextEncoder().encode('hello-world'), }, + { + path: 'root-file', + data: Readable.from('hello-readable-world'), + length: 20, + }, ] const result = await bee.uploadCollection(getPostageBatch(), directoryStructure) - const file = await bee.downloadFile(result.reference, directoryStructure[0].path) + const file0 = await bee.downloadFile(result.reference, directoryStructure[0].path) + + expect(file0.name).toEqual(directoryStructure[0].path) + expect(file0.data.text()).toEqual('hello-world') - expect(file.name).toEqual(directoryStructure[0].path) - expect(file.data.text()).toEqual('hello-world') + const file1 = await bee.downloadFile(result.reference, directoryStructure[1].path) + expect(file1.name).toEqual(directoryStructure[1].path) + expect(file1.data.text()).toEqual('hello-readable-world') }) })
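
Usage sketch (not part of the patch): a minimal TypeScript example of the streaming collection upload this change enables. The node URL, postage batch id and ./big-file.bin path are placeholder assumptions, and importing `Collection` from the package root is assumed based on src/types being re-exported. Stream entries carry `length`, matching the new `CollectionEntry` contract above: on Node the entries are handed to `form-data` as multipart parts without buffering, while the browser variant still buffers a Readable into a Blob before the request (see prepareCollection in data.browser.ts).

import { createReadStream, promises as fsp } from 'fs'
import { Readable } from 'stream'
// Assumption: Collection is re-exported from the package root (it lives in src/types).
import { Bee, Collection } from '@ethersphere/bee-js'

async function main(): Promise<void> {
  const bee = new Bee('http://localhost:1633') // placeholder node URL
  const postageBatchId = '<your-postage-batch-id>' // placeholder batch id

  // Entries may mix in-memory data and Node streams; streamed entries must state
  // their length (the browser prepareCollection rejects Readable entries without it).
  const size = (await fsp.stat('./big-file.bin')).size
  const collection: Collection<Uint8Array | Readable> = [
    { path: 'hello.txt', data: new TextEncoder().encode('hello world') },
    { path: 'assets/big-file.bin', data: createReadStream('./big-file.bin'), length: size },
  ]

  const { reference } = await bee.uploadCollection(postageBatchId, collection)
  console.log('bzz reference:', reference)
}

main().catch(console.error)

The same pattern backs the filesystem and FileList helpers: makeCollectionFromFS now returns fs.createReadStream entries with their stat() size, and makeCollectionFromFileList uses file.stream() with file.size, so directories are no longer read fully into memory and packed into a tar before upload.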