
Commit

Merge pull request #3002 from patrick-rodgers/version-4
adding logic to create read stream, tests now pass
patrick-rodgers authored Apr 19, 2024
2 parents 7a12208 + 7080889 commit 84b8c51
Showing 2 changed files with 30 additions and 11 deletions.
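For orientation, here is a minimal usage sketch of the chunked upload that this change feeds into. It is not part of the commit: the site URL, folder path, and file names are placeholders, and it assumes the props-object form of addChunked suggested by IChunkedOperationProps in the diff below.

import { createReadStream } from "fs";
import { spfi } from "@pnp/sp";
import "@pnp/sp/webs";
import "@pnp/sp/folders";
import "@pnp/sp/files";

async function uploadSample(): Promise<void> {

    // placeholder site URL; supply an auth behavior suited to your environment (e.g. SPDefault from @pnp/nodejs)
    const sp = spfi("https://tenant.sharepoint.com/sites/dev").using(/* auth behavior */);

    // a Node read stream is one of the ValidFileContentSource shapes that sourceToReadableStream handles below
    const content = createReadStream("./local-image.jpg");

    await sp.web.getFolderByServerRelativePath("Shared Documents")
        .files.addChunked("uploaded-image.jpg", content, { progress: (data) => console.log(data) });
}

With ArrayBuffer added to ValidFileContentSource, the same call can also accept a raw buffer, which is what the TODO removed from test/sp/files.ts was tracking.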
40 changes: 30 additions & 10 deletions packages/sp/files/types.ts
@@ -22,9 +22,9 @@ import { ISiteUserProps } from "../site-users/types.js";
import { encodePath } from "../utils/encode-path-str.js";
import { IMoveCopyOptions } from "../types.js";
import { ReadableFile } from "./readable-file.js";
import "../context-info/index.js";
import { BatchNever } from "../batching.js";
import { PassThrough, Stream } from "stream";
import "../context-info/index.js";

/**
* Describes a collection of File objects
@@ -707,7 +707,7 @@ export interface IChunkedOperationProps {
    progress: (data: IFileUploadProgressData) => void;
}

-export type ValidFileContentSource = Blob | ReadableStream | TransformStream | Stream | PassThrough;
+export type ValidFileContentSource = Blob | ReadableStream | TransformStream | Stream | PassThrough | ArrayBuffer;

function applyChunckedOperationDefaults(props: Partial<IChunkedOperationProps>): IChunkedOperationProps {
    return {
@@ -725,8 +725,7 @@ function sourceToReadableStream(source: ValidFileContentSource): ReadableStream

        return <any>source.stream();

-    // eslint-disable-next-line @typescript-eslint/dot-notation
-    } else if (isPassThrough(source)) {
+    } else if (hasOn(source)) {

        // we probably have a passthrough stream from NodeFetch or some other type that supports "on(data)"
        return new ReadableStream({
@@ -742,30 +741,51 @@
            },
        });

+    } else if (isBuffer(source)) {
+
+        // we think we have a buffer
+        return new ReadableStream({
+            start(controller) {
+
+                controller.enqueue(source);
+                controller.close();
+            },
+        });
+
+    } else if (isTransform(source)) {
+
+        return source.readable;
+
    } else {

-        return <any>source;
+        return source;
    }
}

const NAME = Symbol.toStringTag;

-function isPassThrough(object): object is PassThrough {
+function hasOn(object): object is PassThrough | Stream {
    // eslint-disable-next-line @typescript-eslint/dot-notation
    return typeof object["on"] === "function";
}

// FROM: node-fetch source code
function isBlob(object): object is Blob {
-    return (
-        typeof object === "object" &&
+    return typeof object === "object" &&
        typeof object.arrayBuffer === "function" &&
        typeof object.type === "string" &&
        typeof object.stream === "function" &&
        typeof object.constructor === "function" &&
        (
            /^(Blob|File)$/.test(object[NAME]) ||
            /^(Blob|File)$/.test(object.constructor.name)
-        )
-    );
+        );
}
+
+function isBuffer(object): object is ArrayBuffer {
+    return typeof object === "object" && typeof object.length === "number";
+}
+
+function isTransform(object): object is TransformStream {
+    return typeof object === "object" && typeof object.readable === "object";
+}
1 change: 0 additions & 1 deletion test/sp/files.ts
@@ -91,7 +91,6 @@ describe("Files", function () {
        expect(file.Name).to.eq(name);
    });

-    // TODO: This is an ArrayBuffer but the addChunked method doesn't seem to support that when getting a readable stream, needs work.
    it("addChunked", async function () {

        const name = `Testing Chunked - ${getRandomString(4)}.jpg`;
