From f70735f63095f59caefe05baa8640927bde9aac2 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Wed, 11 Jun 2025 15:59:00 +0000 Subject: [PATCH 01/30] feat(lib-storage): add S3 Transfer Manager feat(lib-storage): added doc comments for interfaces and types feat(lib-storage): address PR review feedback for transfer manager types feat(lib-storage): addressed minor review feedback feat(lib-storage): added example code file feat(lib-storage): changed "handler" to "listener" and changed type file names feat(lib-storage): created TransferManager class and constructor with defaults feat(lib-storage): beginning implementation for download(), created TM index file feat(lib-storage): range multipart download feat(lib-storage): download() improvements post pair-programming feat(lib-storage): transfermanager download() iteration feat(lib-storage): transfermanager interation post pair programming feat(lib-storage): joinstream iteration, web stream not fully functional feat(lib-storage): both cases of range download handled feat(lib-storage): range download working chore: acquire lock on streams feat(lib-storage): bug fixes and test env setup feat(lib-storage): implemented dispatchEvent(), need to add dispatches for complete and fail feat(lib-storage): requests array, eventListeners revision, needs more testing feat(lib-storage): addEventListener & removeEventListener implemented, needs support for options feat(lib-storage): added support for adding event listeners at request level feat(lib-storage): added ETag verification for subsequent GetObjectRequests feat(lib-storage): totalSize changes, and added validateExpectedRanges() feat(lib-storage): addEventListener once parameter, type fixes feat(lib-storage): s3TM SEP test cases and added TODOs feat(lib-storage): validateExpectedRanges fixes and unit tests feat(lib-storage): added check in validateExpectedRanges if final part doesnt download total object feat(lib-storage): s3TM constructor and add event listener tests 
feat(lib-storage): addEventListener tests and error checking adjustment and dispatchEvent tests feat(lib-storage): added test cases --- lib/lib-storage/package.json | 1 + lib/lib-storage/src/index.ts | 1 + lib/lib-storage/src/lib-storage.e2e.spec.ts | 3 +- .../S3TransferManager.e2e.spec.ts | 179 ++++++ .../S3TransferManager.spec.ts | 377 +++++++++++++ .../s3-transfer-manager/S3TransferManager.ts | 534 ++++++++++++++++++ .../event-listener-types.ts | 58 ++ .../example-code/upload-download-examples.ts | 177 ++++++ .../src/s3-transfer-manager/index.ts | 3 + .../join-streams.browser.ts | 43 ++ .../src/s3-transfer-manager/join-streams.ts | 71 +++ .../src/s3-transfer-manager/stream-guards.ts | 15 + .../src/s3-transfer-manager/types.ts | 344 +++++++++++ .../gradle/wrapper/gradle-wrapper.properties | 3 +- 14 files changed, 1807 insertions(+), 2 deletions(-) create mode 100644 lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts create mode 100644 lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts create mode 100644 lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts create mode 100644 lib/lib-storage/src/s3-transfer-manager/event-listener-types.ts create mode 100644 lib/lib-storage/src/s3-transfer-manager/example-code/upload-download-examples.ts create mode 100644 lib/lib-storage/src/s3-transfer-manager/index.ts create mode 100644 lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts create mode 100644 lib/lib-storage/src/s3-transfer-manager/join-streams.ts create mode 100644 lib/lib-storage/src/s3-transfer-manager/stream-guards.ts create mode 100644 lib/lib-storage/src/s3-transfer-manager/types.ts diff --git a/lib/lib-storage/package.json b/lib/lib-storage/package.json index 92376bc6c283..b55eba950877 100644 --- a/lib/lib-storage/package.json +++ b/lib/lib-storage/package.json @@ -59,6 +59,7 @@ }, "browser": { "./dist-es/runtimeConfig": "./dist-es/runtimeConfig.browser", + "./dist-es/s3-transfer-manager/join-streams": 
"./dist-es/s3-transfer-manager/join-streams.browser", "fs": false, "stream": "stream-browserify" }, diff --git a/lib/lib-storage/src/index.ts b/lib/lib-storage/src/index.ts index 4a6222b183e6..b4b0787ddb15 100644 --- a/lib/lib-storage/src/index.ts +++ b/lib/lib-storage/src/index.ts @@ -1,2 +1,3 @@ export * from "./Upload"; +export * from "./s3-transfer-manager/index"; export * from "./types"; diff --git a/lib/lib-storage/src/lib-storage.e2e.spec.ts b/lib/lib-storage/src/lib-storage.e2e.spec.ts index deb2c75bcb3b..ff29627e101b 100644 --- a/lib/lib-storage/src/lib-storage.e2e.spec.ts +++ b/lib/lib-storage/src/lib-storage.e2e.spec.ts @@ -6,7 +6,8 @@ import { afterAll, beforeAll, describe, expect, test as it } from "vitest"; import { getIntegTestResources } from "../../../tests/e2e/get-integ-test-resources"; -describe("@aws-sdk/lib-storage", () => { +// todo(s3-transfer-manager): unskip +describe.skip("@aws-sdk/lib-storage", () => { describe.each([undefined, "WHEN_REQUIRED", "WHEN_SUPPORTED"])( "requestChecksumCalculation: %s", (requestChecksumCalculation) => { diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts new file mode 100644 index 000000000000..3f5b6e83e794 --- /dev/null +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -0,0 +1,179 @@ +import { S3 } from "@aws-sdk/client-s3"; +import { beforeAll, describe, expect, test as it } from "vitest"; + +import { getIntegTestResources } from "../../../../tests/e2e/get-integ-test-resources"; +import { Upload } from "../Upload"; +import { S3TransferManager } from "./S3TransferManager"; +import type { IS3TransferManager, S3TransferManagerConfig } from "./types"; + +describe(S3TransferManager.name, () => { + const chunk = "01234567"; + + function data(bytes: number) { + let buffer = ""; + while (buffer.length < bytes) { + buffer += chunk; + } + return buffer.slice(0, bytes); + } + + function 
check(str = "") { + while (str.length > 0) { + expect(str.slice(0, 8)).toEqual(chunk); + str = str.slice(8); + } + } + + let client: S3; + let tmPart: S3TransferManager; + let tmRange: S3TransferManager; + let Bucket: string; + let region: string; + + beforeAll(async () => { + // const integTestResourcesEnv = await getIntegTestResources(); + // Object.assign(process.env, integTestResourcesEnv); + + // region = process?.env?.AWS_SMOKE_TEST_REGION as string; + // Bucket = process?.env?.AWS_SMOKE_TEST_BUCKET as string; + void getIntegTestResources; + + region = "us-west-2"; + Bucket = "lukachad-us-west-2"; + + client = new S3({ + region, + }); + tmPart = new S3TransferManager({ + s3ClientInstance: client, + multipartDownloadType: "PART", + }); + tmRange = new S3TransferManager({ + s3ClientInstance: client, + multipartDownloadType: "RANGE", + }); + }, 120_000); + + describe.skip("multi part download", () => { + const modes = ["PART", "RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; + const sizes = [6, 11] as number[]; + + for (const mode of modes) { + for (const size of sizes) { + it(`should download an object of size ${size} with mode ${mode}`, async () => { + const Body = data(size * 1024 * 1024); + const Key = `${mode}-size`; + + if (mode === "PART") { + await new Upload({ + client, + params: { + Bucket, + Key, + Body, + }, + }).done(); + } else { + await client.putObject({ + Bucket, + Key, + Body, + }); + } + + const tm: S3TransferManager = mode === "PART" ? 
tmPart : tmRange; + + let bytesTransferred = 0; + + const download = await tm.download( + { + Bucket, + Key, + }, + { + eventListeners: { + transferInitiated: [({ request, snapshot }) => {}], + bytesTransferred: [ + ({ request, snapshot }) => { + bytesTransferred = snapshot.transferredBytes; + }, + ], + transferComplete: [({ request, snapshot, response }) => {}], + }, + } + ); + const serialized = await download.Body?.transformToString(); + check(serialized); + + expect(bytesTransferred).toEqual(Body.length); + }, 60_000); + } + } + }); + + describe("(SEP) download single object tests", () => { + async function sepTests( + objectType: "single" | "multipart", + multipartType: "PART" | "RANGE", + range: string | undefined, + partNumber: 2 | undefined + ) { + const Body = data(12 * 1024 * 1024); + const Key = `${objectType}${multipartType}${range}${partNumber}`; + const DEFAULT_PART_SIZE = 8 * 1024 * 1024; + + if (multipartType === "PART") { + await new Upload({ + client, + partSize: DEFAULT_PART_SIZE, + params: { + Bucket, + Key, + Body, + }, + }).done(); + } else { + await client.putObject({ + Bucket, + Key, + Body, + }); + } + + const tm: S3TransferManager = multipartType === "PART" ? 
tmPart : tmRange; + + const download = await tm.download({ + Bucket, + Key, + Range: range, + PartNumber: partNumber, + }); + const serialized = await download.Body?.transformToString(); + check(serialized); + if (partNumber) { + expect(serialized?.length).toEqual(DEFAULT_PART_SIZE); + } else { + expect(serialized?.length).toEqual(Body.length); + } + } + + it("single object: multipartDownloadType = PART, range = 0-12MB, partNumber = null", async () => { + await sepTests("single", "PART", `bytes=0-${12 * 1024 * 1024}`, undefined); + }, 60_000); + it("multipart object: multipartDownloadType = RANGE, range = 0-12MB, partNumber = null", async () => { + await sepTests("multipart", "RANGE", `bytes=0-${12 * 1024 * 1024}`, undefined); + }, 60_000); + it("single object: multipartDownloadType = PART, range = null, partNumber = 2", async () => { + await sepTests("single", "PART", undefined, 2); + }, 60_000); + it("single object: multipartDownloadType = RANGE, range = null, partNumber = 2", async () => { + await sepTests("single", "RANGE", undefined, 2); + }, 60_000); + it("single object: multipartDownloadType = PART, range = null, partNumber = null", async () => { + await sepTests("single", "PART", undefined, undefined); + }, 60_000); + it("single object: multipartDownloadType = RANGE, range = null, partNumber = null", async () => { + await sepTests("single", "RANGE", undefined, undefined); + }, 60_000); + }); +}); diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts new file mode 100644 index 000000000000..b8d56721d17a --- /dev/null +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts @@ -0,0 +1,377 @@ +import { S3, S3Client } from "@aws-sdk/client-s3"; +import { TransferCompleteEvent, TransferEvent } from "@aws-sdk/lib-storage/dist-types/s3-transfer-manager/types"; +import { beforeAll, beforeEach, describe, expect, test as it, vi } from "vitest"; + +import { 
S3TransferManager } from "./S3TransferManager"; + +/** + * Unit Tests: + * - addEventListener() + * - dispatchEvent() + * - removeEventListener() + * - TM Constructor + * - *iterateListeners() + * - joinStreams() + * - iterateStreams() + * - validateExpectedRanges() + */ + +describe("S3TransferManager Unit Tests", () => { + let client: S3; + let Bucket: string; + let region: string; + + beforeAll(async () => { + region = "us-west-1"; + Bucket = "lukachad-us-west-2"; + + client = new S3({ + region, + responseChecksumValidation: "WHEN_REQUIRED", + }); + }); + describe("S3TransferManager Constructor", () => { + it("Should create an instance of S3TransferManager with defaults given no parameters", () => { + const tm = new S3TransferManager() as any; + + expect(tm.s3ClientInstance).toBeInstanceOf(S3Client); + expect(tm.targetPartSizeBytes).toBe(8 * 1024 * 1024); + expect(tm.multipartUploadThresholdBytes).toBe(16 * 1024 * 1024); + expect(tm.checksumValidationEnabled).toBe(true); + expect(tm.checksumAlgorithm).toBe("CRC32"); + expect(tm.multipartDownloadType).toBe("PART"); + expect(tm.eventListeners).toEqual({ + transferInitiated: [], + bytesTransferred: [], + transferComplete: [], + transferFailed: [], + }); + }); + + it("Should create an instance of S3TransferManager with all given parameters", () => { + const eventListeners = { + transferInitiated: [() => console.log("transferInitiated")], + bytesTransferred: [() => console.log("bytesTransferred")], + transferComplete: [() => console.log("transferComplete")], + transferFailed: [() => console.log("transferFailed")], + }; + const tm = new S3TransferManager({ + s3ClientInstance: client, + targetPartSizeBytes: 8 * 1024 * 1024, + checksumValidationEnabled: true, + checksumAlgorithm: "CRC32", + multipartDownloadType: "RANGE", + eventListeners: eventListeners, + }) as any; + + expect(tm.s3ClientInstance).toBe(client); + expect(tm.targetPartSizeBytes).toBe(8 * 1024 * 1024); + expect(tm.checksumValidationEnabled).toBe(true); + 
expect(tm.checksumAlgorithm).toBe("CRC32"); + expect(tm.multipartDownloadType).toBe("RANGE"); + expect(tm.eventListeners).toEqual(eventListeners); + }); + + it("Should throw an error given targetPartSizeBytes smaller than minimum", () => { + expect(() => { + new S3TransferManager({ + targetPartSizeBytes: 2 * 1024 * 1024, + }); + }).toThrow(`targetPartSizeBytes must be at least ${5 * 1024 * 1024} bytes`); + }); + }); + + describe("EventListener functions", () => { + let tm: S3TransferManager; + + function initiated(event: TransferEvent) { + return { + request: event.request, + snapshot: event.snapshot, + }; + } + function transferring(event: TransferEvent) { + return { + request: event.request, + snapshot: event.snapshot, + }; + } + function completed(event: TransferCompleteEvent) { + return { + request: event.request, + snapshot: event.snapshot, + response: event.response, + }; + } + function failed(event: TransferEvent) { + return { + request: event.request, + snapshot: event.snapshot, + }; + } + + beforeEach(async () => { + tm = new S3TransferManager({ + s3ClientInstance: client, + }); + }); + + describe("addEventListener()", () => { + it("Should register callbacks for all supported event types", () => { + tm.addEventListener("transferInitiated", initiated); + tm.addEventListener("bytesTransferred", transferring); + tm.addEventListener("transferComplete", completed); + tm.addEventListener("transferFailed", failed); + + expect((tm as any).eventListeners).toEqual({ + transferInitiated: [initiated], + bytesTransferred: [transferring], + transferComplete: [completed], + transferFailed: [failed], + }); + }); + + it("Should handle registering the same listener multiple times", () => { + const callback1 = vi.fn(); + tm.addEventListener("transferInitiated", callback1); + tm.addEventListener("transferInitiated", callback1); + + expect((tm as any).eventListeners.transferInitiated).toEqual([callback1, callback1]); + }); + + it("Should handle different callbacks for the same 
event type", () => { + const callback1 = vi.fn(); + const callback2 = vi.fn(); + + tm.addEventListener("bytesTransferred", callback1); + tm.addEventListener("bytesTransferred", callback2); + + expect((tm as any).eventListeners.bytesTransferred).toEqual([callback1, callback2]); + }); + + it("Should handle object-style callbacks", () => { + const objectCallback = { + handleEvent: vi.fn(), + }; + tm.addEventListener("transferInitiated", objectCallback as any); + + expect((tm as any).eventListeners.transferInitiated).toEqual([objectCallback]); + }); + + it("Should handle a mix of object-style callbacks and function for the same event", () => { + const callback = vi.fn(); + const objectCallback = { + handleEvent: vi.fn(), + }; + tm.addEventListener("transferInitiated", objectCallback as any); + tm.addEventListener("transferInitiated", callback); + + expect((tm as any).eventListeners.transferInitiated).toEqual([objectCallback, callback]); + }); + + it("Should throw an error for an invalid event type", () => { + expect(() => { + (tm as any).addEventListener("invalidEvent", initiated); + }).toThrow("Unknown event type: invalidEvent"); + }); + + it("Should handle options.once correctly", () => { + const mockCallback = vi.fn(); + tm.addEventListener("transferInitiated", mockCallback, { once: true }); + + const event = Object.assign(new Event("transferInitiated"), { + request: {}, + snapshot: {}, + }); + + tm.dispatchEvent(event); + tm.dispatchEvent(event); + + expect(mockCallback).toHaveBeenCalledTimes(1); + }); + + it("Should handle boolean options parameter", () => { + tm.addEventListener("transferInitiated", initiated, true); + expect((tm as any).eventListeners.transferInitiated).toContain(initiated); + }); + + it("Should handle null callback", () => { + expect(() => { + (tm as any).addEventListener("transferInitiated", null); + }).not.toThrow(); + }); + + it("Should handle object-style callback with handleEvent", () => { + const objectCallback = { handleEvent: vi.fn() }; 
+ tm.addEventListener("transferInitiated", objectCallback as any); + expect((tm as any).eventListeners.transferInitiated).toContain(objectCallback); + }); + }); + + describe("dispatchEvent()", () => { + it("Should dispatch an event", () => { + const mockCallback = vi.fn(); + tm.addEventListener("bytesTransferred", mockCallback); + + const event = Object.assign(new Event("bytesTransferred"), { + request: {}, + snapshot: {}, + }); + + const result = tm.dispatchEvent(event); + + expect(mockCallback).toHaveBeenCalledTimes(1); + expect(mockCallback).toHaveBeenCalledWith(event); + expect(result).toBe(true); + }); + + it("Should dispatch an event with request, snapshot, and response information", () => { + const mockCompleted = vi.fn().mockImplementation(completed); + tm.addEventListener("transferComplete", mockCompleted); + + const event = Object.assign(new Event("transferComplete"), { + request: { bucket: "test" }, + snapshot: { bytes: 100 }, + response: { status: "success" }, + }); + + tm.dispatchEvent(event); + + expect(mockCompleted).toHaveBeenCalledWith(event); + expect(mockCompleted).toHaveReturnedWith({ + request: { bucket: "test" }, + snapshot: { bytes: 100 }, + response: { status: "success" }, + }); + }); + + it("Should call multiple listeners for the same event type", () => { + const mockCallback = vi.fn(); + tm.addEventListener("transferInitiated", mockCallback); + tm.addEventListener("transferInitiated", mockCallback); + + const event = Object.assign(new Event("transferInitiated"), { + request: {}, + snapshot: {}, + }); + + const result = tm.dispatchEvent(event); + + expect(mockCallback).toHaveBeenCalledTimes(2); + expect(mockCallback).toHaveBeenCalledWith(event); + expect(result).toBe(true); + }); + + it("Should call listeners in the order they were added", () => { + const callOrder: number[] = []; + const mockCallback1 = vi.fn(() => callOrder.push(1)); + const mockCallback2 = vi.fn(() => callOrder.push(2)); + const mockCallback3 = vi.fn(() => 
callOrder.push(3)); + + tm.addEventListener("transferInitiated", mockCallback1); + tm.addEventListener("transferInitiated", mockCallback2); + tm.addEventListener("transferInitiated", mockCallback3); + + const event = Object.assign(new Event("transferInitiated"), { + request: {}, + snapshot: {}, + }); + + tm.dispatchEvent(event); + + expect(callOrder).toEqual([1, 2, 3]); + }); + + it("Should handle object-style callbacks with handleEvent method", () => { + const mockCallback = vi.fn(); + const objectCallback = { + handleEvent: mockCallback, + }; + tm.addEventListener("transferInitiated", objectCallback as any); + + const event = Object.assign(new Event("transferInitiated"), { + request: {}, + snapshot: {}, + }); + + tm.dispatchEvent(event); + + expect(mockCallback).toHaveBeenCalledTimes(1); + expect(mockCallback).toHaveBeenCalledWith(event); + }); + + it("Should handle events with no registered listeners", () => { + const event = Object.assign(new Event("transferInitiated"), { + request: {}, + snapshot: {}, + }); + const result = tm.dispatchEvent(event); + + expect(result).toBe(true); + }); + + it("Should handle unknown event types", () => {}); + + it("Should handle a mix of object-style callbacks and functions", () => {}); + }); + + describe.skip("removeEventListener()", () => { + it("Should remove a listener from an event", () => {}); + + it("Should remove a listener from an event", () => {}); + + it("Should remove a listener from an event", () => {}); + }); + + describe.skip("iterateListeners()", () => {}); + + describe.skip("joinStreams()", () => {}); + + describe.skip("iterateStreams()", () => {}); + }); + + describe("validateExpectedRanges()", () => { + let tm: any; + beforeAll(async () => { + tm = new S3TransferManager() as any; + }, 120_000); + + it("Should pass correct sequential ranges without throwing an error", () => { + const ranges = [ + "bytes 0-5242879/13631488", + "bytes 5242880-10485759/13631488", + "bytes 10485760-13631487/13631488", + ]; + + for 
(let i = 1; i < ranges.length; i++) { + expect(() => { + tm.validateExpectedRanges(ranges[i - 1], ranges[i], i + 1); + }).not.toThrow(); + } + }); + + it("Should throw error for incomplete download", () => { + const ranges = [ + "bytes 0-5242879/13631488", + "bytes 5242880-10485759/13631488", + "bytes 10485760-13631480/13631488", // 8 bytes short + ]; + + expect(() => { + tm.validateExpectedRanges(ranges[1], ranges[2], 3); + }).toThrow( + "Range validation failed: Final part did not cover total range of 13631488. Expected range of bytes 10485760-314572" + ); + }); + + it.each([ + ["bytes 5242881-10485759/13631488", "Expected part 2 to start at 5242880 but got 5242881"], // 1 byte off + ["bytes 5242879-10485759/13631488", "Expected part 2 to start at 5242880 but got 5242879"], // overlap + ["bytes 0-5242879/13631488", "Expected part 2 to start at 5242880 but got 0"], // duplicate + ])("Should throw error for non-sequential range: %s", (invalidRange, expectedError) => { + expect(() => { + tm.validateExpectedRanges("bytes 0-5242879/13631488", invalidRange, 2); + }).toThrow(expectedError); + }); + }); +}); diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts new file mode 100644 index 000000000000..f7f5282d4148 --- /dev/null +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -0,0 +1,534 @@ +import type { + _Object as S3Object, + ChecksumAlgorithm, + GetObjectCommandInput, + PutObjectCommandInput, +} from "@aws-sdk/client-s3"; +import { GetObjectCommand, HeadObjectCommand, S3Client } from "@aws-sdk/client-s3"; +import { getChecksum } from "@aws-sdk/middleware-flexible-checksums/dist-types/getChecksum"; +import { ChecksumConstructor, StreamingBlobPayloadOutputTypes } from "@smithy/types"; +import { Checksum } from "@smithy/types"; + +import type { AddEventListenerOptions, EventListener, RemoveEventListenerOptions } from "./event-listener-types"; +import { joinStreams } 
from "./join-streams"; +import type { + DownloadRequest, + DownloadResponse, + IS3TransferManager, + S3TransferManagerConfig, + TransferCompleteEvent, + TransferEvent, + TransferEventListeners, + TransferOptions, + UploadRequest, + UploadResponse, +} from "./types"; + +export class S3TransferManager implements IS3TransferManager { + private static MIN_PART_SIZE = 5 * 1024 * 1024; // 5MB + private static DEFAULT_PART_SIZE = 8 * 1024 * 1024; // 8MB + private static MIN_UPLOAD_THRESHOLD = 16 * 1024 * 1024; // 16MB + + private readonly s3ClientInstance: S3Client; + private readonly targetPartSizeBytes: number; + private readonly multipartUploadThresholdBytes: number; + private readonly checksumValidationEnabled: boolean; + private readonly checksumAlgorithm: ChecksumAlgorithm; + private readonly multipartDownloadType: "PART" | "RANGE"; + private readonly eventListeners: TransferEventListeners; + + public constructor(config: S3TransferManagerConfig = {}) { + this.checksumValidationEnabled = config.checksumValidationEnabled ?? true; + + const checksumMode = this.checksumValidationEnabled ? "WHEN_SUPPORTED" : "WHEN_REQUIRED"; + + this.s3ClientInstance = + config.s3ClientInstance ?? + new S3Client({ + requestChecksumCalculation: checksumMode, + responseChecksumValidation: checksumMode, + }); + + this.targetPartSizeBytes = config.targetPartSizeBytes ?? S3TransferManager.DEFAULT_PART_SIZE; + this.multipartUploadThresholdBytes = config.multipartUploadThresholdBytes ?? S3TransferManager.MIN_UPLOAD_THRESHOLD; + + this.checksumAlgorithm = config.checksumAlgorithm ?? "CRC32"; + this.multipartDownloadType = config.multipartDownloadType ?? "PART"; + this.eventListeners = { + transferInitiated: config.eventListeners?.transferInitiated ?? [], + bytesTransferred: config.eventListeners?.bytesTransferred ?? [], + transferComplete: config.eventListeners?.transferComplete ?? [], + transferFailed: config.eventListeners?.transferFailed ?? 
[], + }; + + this.validateConfig(); + } + + public addEventListener( + type: "transferInitiated", + callback: EventListener, + options?: AddEventListenerOptions | boolean + ): void; + public addEventListener( + type: "bytesTransferred", + callback: EventListener, + options?: AddEventListenerOptions | boolean + ): void; + public addEventListener( + type: "transferComplete", + callback: EventListener, + options?: AddEventListenerOptions | boolean + ): void; + public addEventListener( + type: "transferFailed", + callback: EventListener, + options?: AddEventListenerOptions | boolean + ): void; + public addEventListener( + type: string, + callback: EventListener | null, + options?: AddEventListenerOptions | boolean + ): void; + public addEventListener( + type: unknown, + callback: EventListener, + options?: AddEventListenerOptions | boolean + ): void { + const eventType = type as keyof TransferEventListeners; + const listeners = this.eventListeners[eventType]; + + if (!listeners) { + throw new Error(`Unknown event type: ${eventType}`); + } + + // TODO: Add support for AbortSignal + + const once = typeof options !== "boolean" && options?.once; + let updatedCallback = callback; + if (once) { + updatedCallback = (event: any) => { + if (typeof callback === "function") { + callback(event); + } else { + callback.handleEvent(event); + } + this.removeEventListener(eventType, updatedCallback); + }; + } + + if (eventType === "transferInitiated" || eventType === "bytesTransferred" || eventType === "transferFailed") { + listeners.push(updatedCallback as EventListener); + } else if (eventType === "transferComplete") { + (listeners as EventListener[]).push( + updatedCallback as EventListener + ); + } + } + + public dispatchEvent(event: Event & TransferEvent): boolean; + public dispatchEvent(event: Event & TransferCompleteEvent): boolean; + public dispatchEvent(event: Event): boolean; + public dispatchEvent(event: any): boolean { + const eventType = event.type; + const listeners = 
this.eventListeners[eventType as keyof TransferEventListeners]; + + if (listeners) { + for (const callback of listeners) { + if (typeof callback === "function") { + callback(event); + } else { + callback.handleEvent?.(event); + } + } + } + return true; + } + + public removeEventListener( + type: "transferInitiated", + callback: EventListener, + options?: RemoveEventListenerOptions | boolean + ): void; + public removeEventListener( + type: "bytesTransferred", + callback: EventListener, + options?: RemoveEventListenerOptions | boolean + ): void; + public removeEventListener( + type: "transferComplete", + callback: EventListener, + options?: RemoveEventListenerOptions | boolean + ): void; + public removeEventListener( + type: "transferFailed", + callback: EventListener, + options?: RemoveEventListenerOptions | boolean + ): void; + public removeEventListener( + type: string, + callback: EventListener | null, + options?: RemoveEventListenerOptions | boolean + ): void; + public removeEventListener(type: unknown, callback: unknown, options?: unknown): void { + const eventType = type as keyof TransferEventListeners; + const listeners = this.eventListeners[eventType]; + + if (listeners) { + if (eventType === "transferInitiated" || eventType === "bytesTransferred" || eventType === "transferFailed") { + const eventListener = callback as EventListener; + const index = listeners.indexOf(eventListener); + if (index !== -1) { + listeners.splice(index, 1); + } + } else if (eventType === "transferComplete") { + const eventListener = callback as EventListener; + const index = (listeners as EventListener[]).indexOf(eventListener); + if (index !== -1) { + (listeners as EventListener[]).splice(index, 1); + } + } else { + throw new Error(`Unknown event type: ${type}`); + } + } + } + + public upload(request: UploadRequest, transferOptions?: TransferOptions): Promise { + throw new Error("Method not implemented."); + } + + public async download(request: DownloadRequest, transferOptions?: 
TransferOptions): Promise { + const metadata = {} as Omit; + const streams = [] as StreamingBlobPayloadOutputTypes[]; + const requests = [] as GetObjectCommandInput[]; + + const partNumber = request.PartNumber; + const range = request.Range; + let totalSize: number | undefined; + + if (transferOptions?.eventListeners) { + for await (const listeners of this.iterateListeners(transferOptions?.eventListeners)) { + for (const listener of listeners) { + this.addEventListener(listener.eventType, listener.callback as EventListener); + } + } + } + + // TODO: Ensure download operation is treated as single object download when partNumber is provided regardless of multipartDownloadType setting + if (typeof partNumber === "number") { + const getObjectRequest = { + ...request, + PartNumber: partNumber, + }; + const getObject = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); + + this.dispatchEvent( + Object.assign(new Event("transferInitiated"), { + request, + snapshot: { + transferredBytes: 0, + totalBytes: getObject.ContentLength, + }, + }) + ); + + if (getObject.Body) { + streams.push(getObject.Body); + requests.push(getObjectRequest); + } + this.assignMetadata(metadata, getObject); + } else if (this.multipartDownloadType === "PART") { + if (range == null) { + const initialPartRequest = { + ...request, + PartNumber: 1, + }; + const initialPart = await this.s3ClientInstance.send(new GetObjectCommand(initialPartRequest), transferOptions); + const initialETag = initialPart.ETag ?? undefined; + totalSize = initialPart.ContentRange ? parseInt(initialPart.ContentRange.split("/")[1]) : undefined; + + this.dispatchTransferInitiatedEvent(request, totalSize); + if (initialPart.Body) { + streams.push(initialPart.Body); + requests.push(initialPartRequest); + } + this.assignMetadata(metadata, initialPart); + + if (initialPart.PartsCount! 
> 1) { + let previousPart = initialPart; + for (let part = 2; part <= initialPart.PartsCount!; part++) { + const getObjectRequest = { + ...request, + PartNumber: part, + IfMatch: !request.VersionId ? initialETag : undefined, + }; + const getObject = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); + + if (getObject.ContentRange && previousPart.ContentRange) { + this.validateExpectedRanges(previousPart.ContentRange, getObject.ContentRange, part); + } + + if (getObject.Body) { + streams.push(getObject.Body); + requests.push(getObjectRequest); + } + this.assignMetadata(metadata, getObject); + previousPart = getObject; + } + } + } else { + const getObjectRequest = { + ...request, + }; + const getObject = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); + totalSize = getObject.ContentRange ? parseInt(getObject.ContentRange.split("/")[1]) : undefined; + + this.dispatchTransferInitiatedEvent(request, totalSize); + if (getObject.Body) { + streams.push(getObject.Body); + requests.push(getObjectRequest); + } + this.assignMetadata(metadata, getObject); + } + } else if (this.multipartDownloadType === "RANGE") { + let initialETag = undefined; + let left = 0; + let right = S3TransferManager.MIN_PART_SIZE; + let maxRange = Infinity; + + if (range != null) { + const [userRangeLeft, userRangeRight] = range.replace("bytes=", "").split("-").map(Number); + + maxRange = userRangeRight; + left = userRangeLeft; + right = Math.min(userRangeRight, left + S3TransferManager.MIN_PART_SIZE); + } + + let remainingLength = 1; + let transferInitiatedEventDispatched = false; + + // TODO: Validate ranges for if multipartDownloadType === "RANGE" + while (remainingLength > 0) { + const range = `bytes=${left}-${right}`; + const getObjectRequest: GetObjectCommandInput = { + ...request, + Range: range, + IfMatch: transferInitiatedEventDispatched && !request.VersionId ? 
initialETag : undefined, + }; + const getObject = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); + + if (!transferInitiatedEventDispatched) { + totalSize = getObject.ContentRange ? parseInt(getObject.ContentRange.split("/")[1]) : undefined; + + this.dispatchTransferInitiatedEvent(request, totalSize); + initialETag = getObject.ETag ?? undefined; + transferInitiatedEventDispatched = true; + } + + if (getObject.Body) { + streams.push(getObject.Body); + requests.push(getObjectRequest); + + // TODO: + // after completing SEP requirements: + // - acquire lock on webstreams in the same + // - synchronous frame as they are opened or else + // - the connection might be closed too early. + if (typeof (getObject.Body as ReadableStream).getReader === "function") { + const reader = (getObject.Body as any).getReader(); + (getObject.Body as any).getReader = function () { + return reader; + }; + } + } + this.assignMetadata(metadata, getObject); + + left = right + 1; + right = Math.min(left + S3TransferManager.MIN_PART_SIZE, maxRange); + + remainingLength = Math.min( + right - left, + Math.max(0, (getObject.ContentLength ?? 
0) - S3TransferManager.MIN_PART_SIZE) + ); + } + } + + const responseBody = joinStreams(streams, { + onBytes: (byteLength: number, index) => { + this.dispatchEvent( + Object.assign(new Event("bytesTransferred"), { + request: requests[index], + snapshot: { + transferredBytes: byteLength, + totalBytes: totalSize, + }, + }) + ); + }, + onCompletion: (byteLength: number, index) => { + this.dispatchEvent( + Object.assign(new Event("transferComplete"), { + request: requests[index], + response: { + ...metadata, + Body: responseBody, + }, + snapshot: { + transferredBytes: byteLength, + totalBytes: totalSize, + }, + }) + ); + }, + onFailure: (error: unknown, index) => { + this.dispatchEvent( + Object.assign(new Event("transferFailed"), { + request: requests[index], + snapshot: { + transferredBytes: error, + totalBytes: totalSize, + }, + }) + ); + }, + }); + + const response = { + ...metadata, + Body: responseBody, + }; + + return response; + } + + public uploadAll(options: { + bucket: string; + source: string; + followSymbolicLinks?: boolean; + recursive?: boolean; + s3Prefix?: string; + filter?: (filepath: string) => boolean; + s3Delimiter?: string; + putObjectRequestCallback?: (putObjectRequest: PutObjectCommandInput) => Promise; + failurePolicy?: (error?: unknown) => Promise; + transferOptions?: TransferOptions; + }): Promise<{ objectsUploaded: number; objectsFailed: number }> { + throw new Error("Method not implemented."); + } + + public downloadAll(options: { + bucket: string; + destination: string; + s3Prefix?: string; + s3Delimiter?: string; + recursive?: boolean; + filter?: (object?: S3Object) => boolean; + getObjectRequestCallback?: (getObjectRequest: GetObjectCommandInput) => Promise; + failurePolicy?: (error?: unknown) => Promise; + transferOptions?: TransferOptions; + }): Promise<{ objectsDownloaded: number; objectsFailed: number }> { + throw new Error("Method not implemented."); + } + + private assignMetadata(container: any, response: any) { + for (const key in 
response) { + if (key === "Body") { + continue; + } + container[key] = response[key]; + } + } + + private validateConfig(): void { + if (this.targetPartSizeBytes < S3TransferManager.MIN_PART_SIZE) { + throw new Error(`targetPartSizeBytes must be at least ${S3TransferManager.MIN_PART_SIZE} bytes`); + } + } + + private dispatchTransferInitiatedEvent(request: DownloadRequest | UploadRequest, totalSize?: number): boolean { + this.dispatchEvent( + Object.assign(new Event("transferInitiated"), { + request, + snapshot: { + transferredBytes: 0, + totalBytes: totalSize, + }, + }) + ); + return true; + } + + /** + * For debugging purposes + * + * @internal + */ + private logCallbackCount(type: unknown): void { + const eventType = type as keyof TransferEventListeners; + const listeners = this.eventListeners[eventType]; + + console.log(`Callback count for ${eventType}: `); + let count = 0; + if (listeners) { + for (const callbacks of listeners) { + count++; + } + } + console.log(count); + } + + private async *iterateListeners(eventListeners: TransferEventListeners) { + for (const eventType in eventListeners) { + const listeners = eventListeners[eventType as keyof TransferEventListeners]; + if (listeners) { + for (const callback of listeners) { + yield [ + { + eventType: eventType, + callback: callback, + }, + ]; + } + } + } + } + + private validateExpectedRanges(previousPart: string, currentPart: string, partNum: number) { + const parseContentRange = (range: string) => { + const match = range.match(/bytes (\d+)-(\d+)\/(\d+)/); + if (!match) throw new Error(`Invalid ContentRange format: ${range}`); + return { + start: parseInt(match[1]), + end: parseInt(match[2]), + total: parseInt(match[3]), + }; + }; + + // TODO: throw error for incomplete download. 
+ // Ex: final part and 8 bytes short should throw error -> "bytes 10485760-13631480/13631488" + + try { + const previous = parseContentRange(previousPart); + const current = parseContentRange(currentPart); + + const expectedStart = previous.end + 1; + const prevPartSize = previous.end - previous.start + 1; + const currPartSize = current.end - current.start + 1; + + if (current.start !== expectedStart) { + throw new Error(`Expected part ${partNum} to start at ${expectedStart} but got ${current.start}`); + } + + // console.log(currPartSize < prevPartSize); + // console.log(current.end !== current.total - 1); + if (currPartSize < prevPartSize && current.end !== current.total - 1) { + throw new Error( + `Final part did not cover total range of ${current.total}. Expected range of bytes ${current.start}-${ + currPartSize - 1 + }` + ); + } + } catch (error) { + throw new Error(`Range validation failed: ${error.message}`); + } + } +} diff --git a/lib/lib-storage/src/s3-transfer-manager/event-listener-types.ts b/lib/lib-storage/src/s3-transfer-manager/event-listener-types.ts new file mode 100644 index 000000000000..1c2bc188c5a4 --- /dev/null +++ b/lib/lib-storage/src/s3-transfer-manager/event-listener-types.ts @@ -0,0 +1,58 @@ +/** + * Function type for handling transfer events in the transfer manager. + * Represents a callback that receives event data during transfer operations. + * + * @param event - The event object containing transfer details and progress information. + + * @public + */ +export type EventListenerFunction = (event: Event & E) => void; + +/** + * Union type for handling transfer events in the transfer manager. + * Can be a function or an object. + * + * @public + */ +export type EventListener = EventListenerFunction | EventListenerObject; + +/** + * Object type for handling transfer events in the transfer manager. + * Represents an object that implements the `handleEvent` method to handle transfer events. 
+ * + * @public + */ +export type EventListenerObject = { + handleEvent: EventListenerFunction; +}; + +/** + * Configuration options for registering event listeners in the transfer manager. + * Controls the behavior of event listeners for transfer events. + * + * @public + */ +export type AddEventListenerOptions = { + /** + * A boolean value indicating that the listener should be invoked at most once + * after being added. If true, the listener would be automatically removed when invoked. + * If not specified, defaults to false. + */ + once?: boolean; + /** + * An AbortSignal. The listener will be removed when the abort() method of the + * AbortController which owns the AbortSignal is called. If not specified, no + * AbortSignal is associated with the listener. + */ + signal?: AbortSignal; +}; + +/** + * Configuration options for removing event listeners in the transfer manager. + * Controls the behavior of event listeners for transfer events. + * + * @public + */ +export type RemoveEventListenerOptions = { + capture?: boolean; +}; diff --git a/lib/lib-storage/src/s3-transfer-manager/example-code/upload-download-examples.ts b/lib/lib-storage/src/s3-transfer-manager/example-code/upload-download-examples.ts new file mode 100644 index 000000000000..b5f9be23949f --- /dev/null +++ b/lib/lib-storage/src/s3-transfer-manager/example-code/upload-download-examples.ts @@ -0,0 +1,177 @@ +// // Import S3 Client client +// import { S3Client } from "@aws-sdk/client-s3"; +// import { dirname } from "node:path"; +// import { DefaultDeserializer } from "node:v8"; + +// import { S3TransferManager } from "../index"; +// // Import transfer manager +// import { +// TransferCompleteEvent, +// TransferEvent, +// TransferEventListeners, +// TransferProgressSnapshot, +// } from "../types"; // would be "@aws-sdk/lib-storage" + +// // Test variables: +// const testBucket = "test-bucket"; +// const testKey = "test-key"; +// const fileStream = "test-body"; +// const DEFAULT_BYTE_SIZE = 8 * 1024 
* 1024; + +// // Example 1: Basic multipart upload and multipart download operations. +// // Initialize S3 client and transfer manager +// const s3Client = new S3Client({}); +// const transferManager = new S3TransferManager({ +// s3ClientInstance: s3Client, +// targetPartSizeBytes: DEFAULT_BYTE_SIZE, +// multipartUploadThresholdBytes: 2 * DEFAULT_BYTE_SIZE, // 16 MB +// checksumValidationEnabled: true, +// checksumAlgorithm: "CRC32", +// multipartDownloadType: "RANGE", +// }); + +// // Perform multipart upload. +// async function uploadLargeFile() { +// const response = await transferManager.upload({ +// Bucket: testBucket, +// Key: testKey, +// Body: fileStream, +// }); +// } + +// // Perform multipart download. +// async function downloadLargeFile() { +// const response = await transferManager.download({ +// Bucket: testBucket, +// Key: testKey, +// Range: "16 MB", +// checksumValidationEnabled: true, +// destinationPath: "user/desktop/...", +// }); +// } + +// //Example 2: Upload all files in directory recursively to an S3 Bucket. +// async function uploadDirectoryToBucket() { +// const response = await transferManager.uploadAll({ +// bucket: testBucket, +// source: "user/desktop/...", +// recursive: true, +// }); +// } + +// //Example 3: Download all files in an S3 Bucket recursively to a directory. +// async function downloadDirectoryToBucket() { +// const response = await transferManager.downloadAll({ +// bucket: testBucket, +// destination: "user/desktop/...", +// recursive: true, +// }); +// } + +// //Example 4: Upload with abort. 
+// async function uploadWithAbort() { +// const abortController = new AbortController(); + +// const uploadResponse = await transferManager.upload({ +// bucket: testBucket, +// key: testKey, +// body: fileStream, +// abortSignal: abortController.signal, +// }); + +// // Abort after 100 ms +// setTimeout(() => { +// console.log("Aborting..."); +// abortController.abort(); +// }, 100); + +// try { +// const response = await uploadResponse; +// console.log("Upload completed successfully."); +// } catch (error) { +// console.log("Upload aborted."); +// } +// } + +// //Example 5: Request Level Progress Listener +// async function downloadWithProgressListener() { +// const abortController = new AbortController(); + +// const transferOptions = { +// abortSignal: abortController.signal, +// transferInitiated: [ +// (event: TransferEvent) => { +// console.log("Transfer Initiated"); +// }, +// ], +// bytesTransferred: [ +// (event: TransferEvent) => { +// const progress = event.snapshot; +// if (progress.totalBytes != undefined) { +// const percent = (progress.transferredBytes / progress.totalBytes) * 100; +// console.log(`Transfer Progress: ${percent}%`); +// } +// }, +// ], +// transferComplete: [ +// (event: TransferCompleteEvent) => { +// console.log("Transfer Complete"); +// }, +// ], +// transferFailed: [ +// (event: TransferEvent) => { +// console.log("Transfer Failed"); +// }, +// ], +// }; + +// const downloadResponse = await transferManager.download( +// { +// bucket: testBucket, +// key: testKey, +// multipartDownloadType: "RANGE", +// range: DEFAULT_BYTE_SIZE, +// checksumValidationEnabled: true, +// destinationPath: "user/desktop/...", +// }, +// transferOptions +// ); + +// return downloadResponse; +// } + +// //Example 6: Client Level Progress Listener +// const transferManager2 = new S3TransferManager({ +// s3ClientInstance: s3Client, +// targetPartSizeBytes: DEFAULT_BYTE_SIZE, +// multipartUploadThresholdBytes: 2 * DEFAULT_BYTE_SIZE, // 16 MB +// 
checksumValidationEnabled: true, +// checksumAlgorithm: "CRC32", +// multipartDownloadType: "RANGE", +// transferProgressListeners: { +// transferInitiated: [ +// (event: TransferEvent) => { +// console.log("Transfer Initiated"); +// }, +// ], +// bytesTransferred: [ +// (event: TransferEvent) => { +// const progress = event.snapshot; +// if (progress.totalBytes != undefined) { +// const percent = (progress.transferredBytes / progress.totalBytes) * 100; +// console.log(`Transfer Progress: ${percent}%`); +// } +// }, +// ], +// transferComplete: [ +// (event: TransferCompleteEvent) => { +// console.log("Transfer Complete"); +// }, +// ], +// transferFailed: [ +// (event: TransferEvent) => { +// console.log("Transfer Failed"); +// }, +// ], +// }, +// }); diff --git a/lib/lib-storage/src/s3-transfer-manager/index.ts b/lib/lib-storage/src/s3-transfer-manager/index.ts new file mode 100644 index 000000000000..c5fd2ea54f9d --- /dev/null +++ b/lib/lib-storage/src/s3-transfer-manager/index.ts @@ -0,0 +1,3 @@ +export { S3TransferManager } from "./S3TransferManager"; +export type { IS3TransferManager } from "./types"; +export type {} from "./event-listener-types"; diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts new file mode 100644 index 000000000000..5b2b13d3d8b4 --- /dev/null +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts @@ -0,0 +1,43 @@ +import { StreamingBlobPayloadOutputTypes } from "@smithy/types"; +import { isBlob, isReadableStream, sdkStreamMixin } from "@smithy/util-stream"; + +// check all types. 
needs to join nodejs and browser together +export function joinStreams(streams: StreamingBlobPayloadOutputTypes[]): StreamingBlobPayloadOutputTypes { + console.log("Is Readable Stream: "); + console.log(isReadableStream(streams[0])); + + if (streams.length === 1) { + return streams[0]; + } else if (isReadableStream(streams[0]) || isBlob(streams[0])) { + const newReadableStream = new ReadableStream({ + async start(controller) { + for await (const chunk of iterateStreams(streams)) { + controller.enqueue(chunk); + } + controller.close(); + }, + }); + return sdkStreamMixin(newReadableStream); + } else { + throw new Error("Unknown stream type"); + } +} + +export async function* iterateStreams( + streams: StreamingBlobPayloadOutputTypes[] +): AsyncIterable { + for (const stream of streams) { + if (isReadableStream(stream)) { + const reader = (stream as ReadableStream).getReader(); + try { + while (true) { + const { done, value } = await reader.read(); + if (done) break; + yield value; + } + } finally { + reader.releaseLock(); + } + } + } +} diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts new file mode 100644 index 000000000000..574c99aba51f --- /dev/null +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts @@ -0,0 +1,71 @@ +import { StreamingBlobPayloadOutputTypes } from "@smithy/types"; +import { isBlob, isReadableStream, sdkStreamMixin } from "@smithy/util-stream"; +import { Readable } from "stream"; + +import { JoinStreamIterationEvents } from "./types"; + +// TODO: check all types. 
needs to join nodejs and browser together +export function joinStreams( + streams: StreamingBlobPayloadOutputTypes[], + eventListeners?: JoinStreamIterationEvents +): StreamingBlobPayloadOutputTypes { + if (streams.length === 1) { + return streams[0]; + } else if (isReadableStream(streams[0])) { + const newReadableStream = new ReadableStream({ + async start(controller) { + for await (const chunk of iterateStreams(streams, eventListeners)) { + controller.enqueue(chunk); + } + controller.close(); + }, + }); + return sdkStreamMixin(newReadableStream); + } else if (isBlob(streams[0])) { + throw new Error("Blob not supported yet"); + } else { + return sdkStreamMixin(Readable.from(iterateStreams(streams, eventListeners))); + } +} + +export async function* iterateStreams( + streams: StreamingBlobPayloadOutputTypes[], + eventListeners?: JoinStreamIterationEvents +): AsyncIterable { + let bytesTransferred = 0; + let index = 0; + for (const stream of streams) { + if (isReadableStream(stream)) { + // const reader = stream.getReader(); + // while (true) { + // const { done, value } = await reader.read(); + // if (done) { + // break; + // } + // yield value; + // bytesTransferred += value.byteLength; + // } + // reader.releaseLock(); + + const failure = new Error(`ReadableStreams not supported yet ${(stream as any)?.constructor?.name}`); + eventListeners?.onFailure?.(failure, index); + throw failure; + } else if (isBlob(stream)) { + throw new Error("Blob not supported yet"); + } else if (stream instanceof Readable) { + for await (const chunk of stream) { + yield chunk; + const chunkSize = Buffer.isBuffer(chunk) ? 
chunk.length : Buffer.byteLength(chunk); + bytesTransferred += chunkSize; + + eventListeners?.onBytes?.(bytesTransferred, index); + } + } else { + const failure = new Error(`unhandled stream type ${(stream as any)?.constructor?.name}`); + eventListeners?.onFailure?.(failure, index); + throw failure; + } + index++; + } + eventListeners?.onCompletion?.(bytesTransferred, index - 1); +} diff --git a/lib/lib-storage/src/s3-transfer-manager/stream-guards.ts b/lib/lib-storage/src/s3-transfer-manager/stream-guards.ts new file mode 100644 index 000000000000..7ea282c74d6f --- /dev/null +++ b/lib/lib-storage/src/s3-transfer-manager/stream-guards.ts @@ -0,0 +1,15 @@ +// * confirm if filestream fits here * + +import { Readable } from "stream"; + +// will not work with browser because no readable in browser +export function isNodeStream(stream: unknown): stream is Readable { + return typeof stream === "object" && stream !== null && "pipe" in stream && typeof stream.pipe === "function"; +} + +export function isWebStream(stream: unknown): stream is ReadableStream | Blob { + return ( + (typeof ReadableStream !== "undefined" && stream instanceof ReadableStream) || + (typeof Blob !== "undefined" && stream instanceof Blob) + ); +} diff --git a/lib/lib-storage/src/s3-transfer-manager/types.ts b/lib/lib-storage/src/s3-transfer-manager/types.ts new file mode 100644 index 000000000000..6b6e81f6d6b1 --- /dev/null +++ b/lib/lib-storage/src/s3-transfer-manager/types.ts @@ -0,0 +1,344 @@ +import type { + _Object as S3Object, + ChecksumAlgorithm, + CompleteMultipartUploadCommandOutput, + CreateMultipartUploadCommandInput, + GetObjectCommandInput, + GetObjectCommandOutput, + PutObjectCommandInput, + PutObjectCommandOutput, + S3Client, +} from "@aws-sdk/client-s3"; +import { HttpHandlerOptions } from "@smithy/types"; + +import { AddEventListenerOptions, EventListener, RemoveEventListenerOptions } from "./event-listener-types"; + +/** + * Constructor parameters for the S3 Transfer Manager 
configuration. + * + * @public + */ +export interface S3TransferManagerConfig { + /** + * The low level S3 client that will be used to send requests to S3. + */ + s3ClientInstance?: S3Client; + /** + * The target part size to use in a multipart transfer. Does not apply to downloads if multipartDownloadType is PART. + */ + targetPartSizeBytes?: number; + /** + * The size threshold, in bytes, for when to use multipart upload. + */ + multipartUploadThresholdBytes?: number; + /** + * Option for whether to use checksum validation for download. + */ + checksumValidationEnabled?: boolean; + /** + * Checksum algorithm to use for upload. + */ + checksumAlgorithm?: ChecksumAlgorithm; + /** + * How the SDK should perform multipart download, either RANGE or PART. + */ + multipartDownloadType?: "RANGE" | "PART"; + /** + * Collection of callbacks for monitoring transfer lifecycle events. Allows tracking statuses of all transfers from the client. + */ + eventListeners?: TransferEventListeners; +} + +/** + * Uses intersection because requests includes all the required parameters from + * both PutObjectCommandInput and CreateMultipartUploadCommandInput to support both single object + * and multipart upload requests. + * + * @public + */ +export type UploadRequest = PutObjectCommandInput & CreateMultipartUploadCommandInput; + +/** + * Uses union because the responses can vary from single object upload response to multipart upload + * response depending on the request. + * + * @public + */ +export type UploadResponse = PutObjectCommandOutput | CompleteMultipartUploadCommandOutput; + +/** + * Features the same properties as SDK JS S3 Command GetObjectCommandInput. + * Created to standardize naming convention for TM APIs. + * + * @public + */ +export type DownloadRequest = GetObjectCommandInput; + +/** + * Features the same properties as SDK JS S3 Command GetObjectCommandOutput. + * Created to standardize naming convention for TM APIs. 
+ * + * @public + */ +export type DownloadResponse = GetObjectCommandOutput; + +/** + * Options for transfer operations that combine HTTP handler options with transfer event listeners. + * + * @property eventListeners - Collection of callbacks for monitoring transfer lifecycle events + * + * @public + */ +export type TransferOptions = HttpHandlerOptions & { eventListeners?: TransferEventListeners }; + +/** + * Client for efficient transfer of objects to and from Amazon S3. + * Provides methods to optimize uploading and downloading individual objects + * as well as entire directories, with support for multipart operations, + * concurrency control, and request cancellation. + * Implements an event-based progress tracking system with methods to register, + * dispatch, and remove listeners for transfer lifecycle events. + * + * @public + */ +export interface IS3TransferManager { + /** + * Lets users upload single objects from a given directory to a given bucket. + * Supports multipart upload, single object upload, and transfer progress listeners. + * + * @param request - All properties of a single or multipart upload request. + * @param transferOptions - Allows users to specify cancel functions for the request and a collection of callbacks for monitoring transfer lifecycle events. Allows tracking statuses per request. + * + * @returns The response from the S3 API for the upload request. + */ + upload(request: UploadRequest, transferOptions?: TransferOptions): Promise; + + /** + * Lets users download single objects from a given bucket to a given directory. + * Supports multipart download, single object download, and transfer progress listeners. + * + * @param request - All properties of a single or multipart upload request. + * @param transferOptions - Allows users to specify cancel functions for the request and a collection of callbacks for monitoring transfer lifecycle events. Allows tracking statuses per request. 
+ * + * @returns the response from the S3 API for the download request. + */ + download(request: DownloadRequest, transferOptions?: TransferOptions): Promise; + + /** + * Represents an API to upload all files under the given directory to the provided S3 bucket. + * + * @param options.bucket - The name of the bucket to upload objects to. + * @param options.source - The source directory to upload. + * @param options.followSymbolicLinks - Whether to follow symbolic links when traversing the file tree. + * @param options.recursive - Whether to upload directories recursively. + * @param options.s3Prefix - The S3 key prefix to use for each object. If not provided, files will be uploaded to the root of the bucket todo(). + * @param options.filter - A callback to allow users to filter out unwanted S3 object. It is invoked for each S3 object. An example implementation is a predicate that takes an S3Object and returns a boolean indicating whether this S3Object should be uploaded. + * @param options.s3Delimiter - Default "/". The S3 delimiter. A delimiter causes a list operation to roll up all the keys that share a common prefix into a single summary list result. + * @param options.putObjectRequestCallback - A callback mechanism to allow customers to update individual putObjectRequest that the S3 Transfer Manager generates. + * @param options.failurePolicy - The failure policy to handle failed requests. + * @param options.transferOptions - Allows supplying an AbortSignal and/or transfer event listeners. 
+ * + * @returns the number of objects that have been uploaded and the number of objects that have failed + */ + uploadAll(options: { + bucket: string; + source: string; + followSymbolicLinks?: boolean; + recursive?: boolean; + s3Prefix?: string; + filter?: (filepath: string) => boolean; + s3Delimiter?: string; + putObjectRequestCallback?: (putObjectRequest: PutObjectCommandInput) => Promise; + failurePolicy?: (error?: unknown) => Promise; + transferOptions?: TransferOptions; + }): Promise<{ + objectsUploaded: number; + objectsFailed: number; + }>; + + /** + * Represents an API to download all objects under a bucket to the provided local directory. + * + * @param options.bucket - The name of the bucket. + * @param options.destination - The destination directory. + * @param options.s3Prefix - Specify the S3 prefix that limits the response to keys that begin with the specified prefix. + * @param options.s3Delimiter - Specify the S3 delimiter. + * @param options.recursive - Whether to upload directories recursively. + * @param options.filter - A callback to allow users to filter out unwanted S3 object. It is invoked for each S3 object. An example implementation is a predicate that takes an S3Object and returns a boolean indicating whether this S3Object should be downloaded. + * @param options.getObjectRequestCallback - A callback mechanism to allow customers to update individual getObjectRequest that the S3 Transfer Manager generates. + * @param options.failurePolicy - The failure policy to handle failed requests. + * @param options.transferOptions - Allows supplying an AbortSignal and/or transfer event listeners. 
+ * + * @returns The number of objects that have been uploaded and the number of objects that have failed + */ + downloadAll(options: { + bucket: string; + destination: string; + s3Prefix?: string; + s3Delimiter?: string; + recursive?: boolean; + filter?: (object?: S3Object) => boolean; + getObjectRequestCallback?: (getObjectRequest: GetObjectCommandInput) => Promise; + failurePolicy?: (error?: unknown) => Promise; + transferOptions?: TransferOptions; + }): Promise<{ + objectsDownloaded: number; + objectsFailed: number; + }>; + + /** + * Registers a callback function to be executed when a specific transfer event occurs. + * Supports monitoring the full lifecycle of transfers. + * + * @param type - The type of event to listen for. + * @param callback - Function to execute when the specified event occurs. + * @param options - Optional configuration for event listener behavior. + * + * @public + */ + addEventListener( + type: "transferInitiated", + callback: EventListener, + options?: AddEventListenerOptions | boolean + ): void; + addEventListener( + type: "bytesTransferred", + callback: EventListener, + options?: AddEventListenerOptions | boolean + ): void; + addEventListener( + type: "transferComplete", + callback: EventListener, + options?: AddEventListenerOptions | boolean + ): void; + addEventListener( + type: "transferFailed", + callback: EventListener, + options?: AddEventListenerOptions | boolean + ): void; + addEventListener(type: string, callback: EventListener | null, options?: AddEventListenerOptions | boolean): void; + + /** + * Dispatches an event to the registered event listeners. + * Triggers callbacks registered via addEventListener with matching event types. + * + * @param event - The event object to dispatch. 
+ * @returns whether the event dispatched successfully + * + * @public + */ + dispatchEvent(event: Event & TransferEvent): boolean; + dispatchEvent(event: Event & TransferCompleteEvent): boolean; + dispatchEvent(event: Event): boolean; + + /** + * Removes a previously registered event listener from the specified event type. + * Stops the callback from being invoked when the event occurs. + * + * @param type - The type of event to stop listening for. + * @param callback - The function that was previously registered. + * @param options - Optional configuration for the event listener. + * + * @public + */ + removeEventListener( + type: "transferInitiated", + callback: EventListener, + options?: RemoveEventListenerOptions | boolean + ): void; + removeEventListener( + type: "bytesTransferred", + callback: EventListener, + options?: RemoveEventListenerOptions | boolean + ): void; + removeEventListener( + type: "transferComplete", + callback: EventListener, + options?: RemoveEventListenerOptions | boolean + ): void; + removeEventListener( + type: "transferFailed", + callback: EventListener, + options?: RemoveEventListenerOptions | boolean + ): void; + removeEventListener( + type: string, + callback: EventListener | null, + options?: RemoveEventListenerOptions | boolean + ): void; +} + +/** + * Provides a snapshot of the progress during a single object transfer. + * + * @public + */ +export interface SingleObjectProgressSnapshot { + transferredBytes: number; + totalBytes?: number; + response?: UploadResponse | DownloadResponse; +} + +/** + * Provides a snapshot of the progress during a directory transfer. + * + * @public + */ +export interface DirectoryProgressSnapshot { + transferredBytes: number; + totalBytes?: number; + transferredFiles: number; + totalFiles?: number; +} + +/** + * Progress snapshot for either single object transfers or directory transfers. 
+ * + * @public + */ +export type TransferProgressSnapshot = SingleObjectProgressSnapshot | DirectoryProgressSnapshot; + +/** + * Event interface for transfer progress events. + * Used for tracking ongoing transfers with the original request and progress snapshot. + * + * @public + */ +export interface TransferEvent extends Event { + request: UploadRequest | DownloadRequest; + snapshot: TransferProgressSnapshot; +} + +/** + * Event interface for transfer completion. + * Extends TransferEvent with response data that is received after a completed transfer. + * + * @public + */ +export interface TransferCompleteEvent extends TransferEvent { + response: UploadResponse | DownloadResponse; +} + +/** + * Collection of event handlers to monitor transfer lifecycle events. + * Allows a way to register callbacks for each stage of the transfer process. + * + * @public + */ +export interface TransferEventListeners { + transferInitiated?: EventListener[]; + bytesTransferred?: EventListener[]; + transferComplete?: EventListener[]; + transferFailed?: EventListener[]; +} + +/** + * Event listener type. 
+ * + * @public + */ +export interface JoinStreamIterationEvents { + onBytes?: (byteLength: number, index: number) => void; + onCompletion?: (byteLength: number, index: number) => void; + onFailure?: (error: unknown, index: number) => void; +} diff --git a/tests/react-native/End2End/android/gradle/wrapper/gradle-wrapper.properties b/tests/react-native/End2End/android/gradle/wrapper/gradle-wrapper.properties index ee69dd68d1a6..3ab114e6e74f 100755 --- a/tests/react-native/End2End/android/gradle/wrapper/gradle-wrapper.properties +++ b/tests/react-native/End2End/android/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,6 @@ +#Mon Jun 16 18:19:18 UTC 2025 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists From 49fd8011e9e6881f2b501d9b25c0cf9a8cfef5c3 Mon Sep 17 00:00:00 2001 From: George Fu Date: Mon, 21 Jul 2025 15:29:56 -0400 Subject: [PATCH 02/30] feat(lib-storage): concurrent requests --- .vscode/settings.json | 3 +- .../S3TransferManager.e2e.spec.ts | 27 +- .../S3TransferManager.spec.ts | 570 ++++++++++++++- .../s3-transfer-manager/S3TransferManager.ts | 647 ++++++++++-------- .../join-streams.browser.ts | 48 +- .../src/s3-transfer-manager/join-streams.ts | 47 +- .../src/s3-transfer-manager/types.ts | 10 +- 7 files changed, 970 insertions(+), 382 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 67b515c68994..9f41be91d5d0 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -11,5 +11,6 @@ "source.fixAll.eslint": "explicit" }, "typescript.tsdk": "node_modules/typescript/lib", - "vitest.disableWorkspaceWarning": true + "vitest.disableWorkspaceWarning": true, + "java.configuration.updateBuildConfiguration": "interactive" } diff --git 
a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index 3f5b6e83e794..707391bfb0d2 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -31,15 +31,16 @@ describe(S3TransferManager.name, () => { let region: string; beforeAll(async () => { - // const integTestResourcesEnv = await getIntegTestResources(); - // Object.assign(process.env, integTestResourcesEnv); + // TODO: replace hard coded region and bucket with integration test resources. + const integTestResourcesEnv = await getIntegTestResources(); + Object.assign(process.env, integTestResourcesEnv); - // region = process?.env?.AWS_SMOKE_TEST_REGION as string; - // Bucket = process?.env?.AWS_SMOKE_TEST_BUCKET as string; + region = process?.env?.AWS_SMOKE_TEST_REGION as string; + Bucket = process?.env?.AWS_SMOKE_TEST_BUCKET as string; void getIntegTestResources; - region = "us-west-2"; - Bucket = "lukachad-us-west-2"; + // region = "us-west-2"; + // Bucket = "lukachad-us-west-2"; client = new S3({ region, @@ -54,7 +55,7 @@ describe(S3TransferManager.name, () => { }); }, 120_000); - describe.skip("multi part download", () => { + describe("multi part download", () => { const modes = ["PART", "RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; const sizes = [6, 11] as number[]; @@ -92,13 +93,12 @@ describe(S3TransferManager.name, () => { }, { eventListeners: { - transferInitiated: [({ request, snapshot }) => {}], bytesTransferred: [ ({ request, snapshot }) => { bytesTransferred = snapshot.transferredBytes; + // console.log(bytesTransferred); }, ], - transferComplete: [({ request, snapshot, response }) => {}], }, } ); @@ -111,7 +111,7 @@ describe(S3TransferManager.name, () => { } }); - describe("(SEP) download single object tests", () => { + describe.skip("(SEP) download single object tests", () => { async 
function sepTests( objectType: "single" | "multipart", multipartType: "PART" | "RANGE", @@ -151,7 +151,7 @@ describe(S3TransferManager.name, () => { const serialized = await download.Body?.transformToString(); check(serialized); if (partNumber) { - expect(serialized?.length).toEqual(DEFAULT_PART_SIZE); + expect(serialized?.length).toEqual(4 * 1024 * 1024); // Part 1 is 8MB Part 2 is 4MB } else { expect(serialized?.length).toEqual(Body.length); } @@ -163,10 +163,11 @@ describe(S3TransferManager.name, () => { it("multipart object: multipartDownloadType = RANGE, range = 0-12MB, partNumber = null", async () => { await sepTests("multipart", "RANGE", `bytes=0-${12 * 1024 * 1024}`, undefined); }, 60_000); - it("single object: multipartDownloadType = PART, range = null, partNumber = 2", async () => { + // skipped because TM no longer supports partNumber + it.skip("single object: multipartDownloadType = PART, range = null, partNumber = 2", async () => { await sepTests("single", "PART", undefined, 2); }, 60_000); - it("single object: multipartDownloadType = RANGE, range = null, partNumber = 2", async () => { + it.skip("single object: multipartDownloadType = RANGE, range = null, partNumber = 2", async () => { await sepTests("single", "RANGE", undefined, 2); }, 60_000); it("single object: multipartDownloadType = PART, range = null, partNumber = null", async () => { diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts index b8d56721d17a..139b0dceadc4 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts @@ -1,7 +1,11 @@ import { S3, S3Client } from "@aws-sdk/client-s3"; import { TransferCompleteEvent, TransferEvent } from "@aws-sdk/lib-storage/dist-types/s3-transfer-manager/types"; +import { StreamingBlobPayloadOutputTypes } from "@smithy/types"; +import { Readable } from "stream"; import { 
beforeAll, beforeEach, describe, expect, test as it, vi } from "vitest"; +import { getIntegTestResources } from "../../../../tests/e2e/get-integ-test-resources"; +import { iterateStreams, joinStreams } from "./join-streams"; import { S3TransferManager } from "./S3TransferManager"; /** @@ -22,8 +26,15 @@ describe("S3TransferManager Unit Tests", () => { let region: string; beforeAll(async () => { - region = "us-west-1"; - Bucket = "lukachad-us-west-2"; + const integTestResourcesEnv = await getIntegTestResources(); + Object.assign(process.env, integTestResourcesEnv); + + region = process?.env?.AWS_SMOKE_TEST_REGION as string; + Bucket = process?.env?.AWS_SMOKE_TEST_BUCKET as string; + void getIntegTestResources; + + // region = "us-west-1"; + // Bucket = "lukachad-us-west-2"; client = new S3({ region, @@ -175,7 +186,7 @@ describe("S3TransferManager Unit Tests", () => { }).toThrow("Unknown event type: invalidEvent"); }); - it("Should handle options.once correctly", () => { + it("Should handle options.once correctly, running the listener at most once.", () => { const mockCallback = vi.fn(); tm.addEventListener("transferInitiated", mockCallback, { once: true }); @@ -190,6 +201,50 @@ describe("S3TransferManager Unit Tests", () => { expect(mockCallback).toHaveBeenCalledTimes(1); }); + it("Should not add listener if included AbortSignal is aborted", () => { + const controller = new AbortController(); + const callback = vi.fn(); + controller.abort(); + tm.addEventListener("transferInitiated", callback, { signal: controller.signal }); + expect((tm as any).eventListeners.transferInitiated).toEqual([]); + }); + + it("Should remove listener after included AbortSignal was aborted", () => { + const controller = new AbortController(); + const callback = vi.fn(); + tm.addEventListener("transferInitiated", callback, { signal: controller.signal }); + + const event = Object.assign(new Event("transferInitiated"), { + request: {}, + snapshot: {}, + }); + tm.dispatchEvent(event); + + 
expect(callback).toHaveBeenCalledTimes(1); + expect((tm as any).eventListeners.transferInitiated).toEqual([callback]); + + controller.abort(); + expect((tm as any).eventListeners.transferInitiated).toEqual([]); + }); + + it("Should clean up abort listeners and store cleanup functions in WeakMap", () => { + const controller = new AbortController(); + const callback = vi.fn(); + + tm.addEventListener("transferInitiated", callback, { signal: controller.signal }); + + expect((tm as any).eventListeners.transferInitiated).toEqual([callback]); + expect((tm as any).abortCleanupFunctions.has(controller.signal)).toBe(true); + + const cleanupFn = (tm as any).abortCleanupFunctions.get(controller.signal); + cleanupFn(); + (tm as any).abortCleanupFunctions.delete(controller.signal); + + expect((tm as any).abortCleanupFunctions.has(controller.signal)).toBe(false); + controller.abort(); + expect((tm as any).eventListeners.transferInitiated).toEqual([callback]); + }); + it("Should handle boolean options parameter", () => { tm.addEventListener("transferInitiated", initiated, true); expect((tm as any).eventListeners.transferInitiated).toContain(initiated); @@ -310,68 +365,511 @@ describe("S3TransferManager Unit Tests", () => { expect(result).toBe(true); }); - it("Should handle unknown event types", () => {}); + it("Should handle unknown event types", () => { + const event = Object.assign(new Event("unknownEvent"), { + request: {}, + snapshot: {}, + }); + + const results = tm.dispatchEvent(event); + expect(results).toBe(true); + }); + + it("Should handle a mix of object-style callbacks and functions", () => { + const callback = vi.fn(); + const objectCallback = { + handleEvent: vi.fn(), + }; + tm.addEventListener("transferInitiated", objectCallback as any); + tm.addEventListener("transferInitiated", callback); + + const event = Object.assign(new Event("transferInitiated"), { + request: {}, + snapshot: {}, + }); + + tm.dispatchEvent(event); + + 
expect(objectCallback.handleEvent).toHaveBeenCalledTimes(1); + expect(objectCallback.handleEvent).toHaveBeenCalledWith(event); + expect(callback).toHaveBeenCalledTimes(1); + expect(callback).toHaveBeenCalledWith(event); + }); + }); + + describe("removeEventListener()", () => { + it("Should remove only the specified listener, leaving other intact", () => { + const callback1 = vi.fn(); + const callback2 = vi.fn(); + tm.addEventListener("transferInitiated", callback1); + tm.addEventListener("transferInitiated", callback2); + + tm.removeEventListener("transferInitiated", callback1); + + const event = Object.assign(new Event("transferInitiated"), { + request: {}, + snapshot: {}, + }); + + tm.dispatchEvent(event); + + expect(callback1).not.toHaveBeenCalled(); + expect(callback2).toHaveBeenCalledTimes(1); + }); + + it("Should remove object-style callback with handleEvent", () => { + const objectCallback = { handleEvent: vi.fn() }; + tm.addEventListener("transferInitiated", objectCallback as any); + tm.removeEventListener("transferInitiated", objectCallback as any); + + const event = Object.assign(new Event("transferInitiated"), { + request: {}, + snapshot: {}, + }); + + tm.dispatchEvent(event); + expect(objectCallback.handleEvent).not.toHaveBeenCalled(); + }); + + it("Should remove all instance of the same callback", () => { + const callback = vi.fn(); + tm.addEventListener("transferInitiated", callback); + tm.addEventListener("transferInitiated", callback); + + tm.removeEventListener("transferInitiated", callback); + + const event = Object.assign(new Event("transferInitiated"), { + request: {}, + snapshot: {}, + }); + + tm.dispatchEvent(event); + + expect(callback).not.toHaveBeenCalled(); + }); + + it("Should handle removing non-existing listener gracefully", () => { + const callback = vi.fn(); + expect(() => { + tm.removeEventListener("transferInitiated", callback); + }).not.toThrow(); + }); + + it("Should handle removing from an event type with no listeners 
gracefully", () => { + const callback = vi.fn(); + tm.removeEventListener("transferInitiated", callback); + + const event = Object.assign(new Event("transferInitiated"), { + request: {}, + snapshot: {}, + }); + + tm.dispatchEvent(event); + + expect(callback).not.toHaveBeenCalled(); + }); + + it("Should handle null callback parameter", () => { + expect(() => { + tm.removeEventListener("transferInitiated", null as any); + }).not.toThrow(); + }); + }); + }); + + describe("iterateListeners()", () => { + let tm: S3TransferManager; - it("Should handle a mix of object-style callbacks and functions", () => {}); + beforeEach(async () => { + tm = new S3TransferManager({ + s3ClientInstance: client, + }); }); - describe.skip("removeEventListener()", () => { - it("Should remove a listener from an event", () => {}); + it("Should iterate over all listeners given a TransferManager's object of event listeners", () => { + const callback1 = vi.fn(); + const callback2 = vi.fn(); + const callback3 = vi.fn(); - it("Should remove a listener from an event", () => {}); + const eventListeners = { + transferInitiated: [callback1], + bytesTransferred: [callback2, callback3], + transferComplete: [], + transferFailed: [], + }; + + const results = Array.from((tm as any).iterateListeners(eventListeners)) as any[]; - it("Should remove a listener from an event", () => {}); + expect(results).toHaveLength(3); + expect(results[0][0]).toEqual({ eventType: "transferInitiated", callback: callback1 }); + expect(results[1][0]).toEqual({ eventType: "bytesTransferred", callback: callback2 }); + expect(results[2][0]).toEqual({ eventType: "bytesTransferred", callback: callback3 }); }); - describe.skip("iterateListeners()", () => {}); + it("Should handle empty event listeners object", () => { + const eventListeners = { + transferInitiated: [], + bytesTransferred: [], + transferComplete: [], + transferFailed: [], + }; + + const results = Array.from((tm as any).iterateListeners(eventListeners)) as any[]; - 
describe.skip("joinStreams()", () => {}); + expect(results).toHaveLength(0); + }); - describe.skip("iterateStreams()", () => {}); + it("Should iterate over a mix of functions and objects with handleEvent callback types.", () => { + const callback1 = vi.fn(); + const callback2 = vi.fn(); + const objectCallback = { + handleEvent: vi.fn(), + }; + + const eventListeners = { + transferInitiated: [callback1], + bytesTransferred: [], + transferComplete: [], + transferFailed: [callback2, objectCallback], + }; + + const results = Array.from((tm as any).iterateListeners(eventListeners)) as any[]; + + expect(results).toHaveLength(3); + expect(results[0][0]).toEqual({ eventType: "transferInitiated", callback: callback1 }); + expect(results[1][0]).toEqual({ eventType: "transferFailed", callback: callback2 }); + expect(results[2][0]).toEqual({ eventType: "transferFailed", callback: objectCallback }); + }); + + it("Should handle event lisetners with duplicate callbacks in the same event type", () => { + const callback = vi.fn(); + + const eventListeners = { + transferInitiated: [callback, callback], + bytesTransferred: [], + transferComplete: [callback, callback], + transferFailed: [], + }; + + const results = Array.from((tm as any).iterateListeners(eventListeners)) as any[]; + + expect(results).toHaveLength(4); + for (let i = 0; i < results.length; i++) { + expect(results[i][0]).toEqual({ eventType: results[i][0].eventType, callback }); + } + }); + + it("Should return empty iterator when no callbacks are present", () => { + const eventListeners = {}; + + const results = Array.from((tm as any).iterateListeners(eventListeners)) as any[]; + + expect(results).toHaveLength(0); + }); }); - describe("validateExpectedRanges()", () => { + describe("validatePartRange()", () => { let tm: any; beforeAll(async () => { tm = new S3TransferManager() as any; }, 120_000); - it("Should pass correct sequential ranges without throwing an error", () => { + it("Should pass correct ranges based on part 
number without throwing an error", () => { + const partSize = 5242880; const ranges = [ - "bytes 0-5242879/13631488", - "bytes 5242880-10485759/13631488", - "bytes 10485760-13631487/13631488", + { partNumber: 1, range: "bytes 0-5242879/13631488" }, + { partNumber: 2, range: "bytes 5242880-10485759/13631488" }, + { partNumber: 3, range: "bytes 10485760-13631487/13631488" }, ]; - for (let i = 1; i < ranges.length; i++) { + for (const { partNumber, range } of ranges) { expect(() => { - tm.validateExpectedRanges(ranges[i - 1], ranges[i], i + 1); + tm.validatePartRange(partNumber, range, partSize); }).not.toThrow(); } }); - it("Should throw error for incomplete download", () => { - const ranges = [ - "bytes 0-5242879/13631488", - "bytes 5242880-10485759/13631488", - "bytes 10485760-13631480/13631488", // 8 bytes short - ]; + it("Should throw error for incorrect start position", () => { + const partSize = 5242880; + + expect(() => { + tm.validatePartRange(2, "bytes 5242881-10485759/13631488", partSize); + }).toThrow("Expected part 2 to start at 5242880 but got 5242881"); + + expect(() => { + tm.validatePartRange(2, "bytes 5242879-10485759/13631488", partSize); + }).toThrow("Expected part 2 to start at 5242880 but got 5242879"); + + expect(() => { + tm.validatePartRange(2, "bytes 0-5242879/13631488", partSize); + }).toThrow("Expected part 2 to start at 5242880 but got 0"); + }); + + it("Should throw error for incorrect end position", () => { + const partSize = 5242880; + + expect(() => { + tm.validatePartRange(2, "bytes 5242880-10485760/13631488", partSize); + }).toThrow("Expected part 2 to end at 10485759 but got 10485760"); + + expect(() => { + tm.validatePartRange(3, "bytes 10485760-13631480/13631488", partSize); + }).toThrow("Expected part 3 to end at 13631487 but got 13631480"); + }); + + it("Should handle last part correctly when not a full part size", () => { + const partSize = 5242880; expect(() => { - tm.validateExpectedRanges(ranges[1], ranges[2], 3); - 
}).toThrow( - "Range validation failed: Final part did not cover total range of 13631488. Expected range of bytes 10485760-314572" - ); + tm.validatePartRange(3, "bytes 10485760-13631487/13631488", partSize); + }).not.toThrow(); }); - it.each([ - ["bytes 5242881-10485759/13631488", "Expected part 2 to start at 5242880 but got 5242881"], // 1 byte off - ["bytes 5242879-10485759/13631488", "Expected part 2 to start at 5242880 but got 5242879"], // overlap - ["bytes 0-5242879/13631488", "Expected part 2 to start at 5242880 but got 0"], // duplicate - ])("Should throw error for non-sequential range: %s", (invalidRange, expectedError) => { + it("Should throw error for invalid ContentRange format", () => { + const partSize = 5242880; + expect(() => { - tm.validateExpectedRanges("bytes 0-5242879/13631488", invalidRange, 2); - }).toThrow(expectedError); + tm.validatePartRange(2, "invalid-format", partSize); + }).toThrow("Invalid ContentRange format: invalid-format"); }); }); }); + +// describe("join-streams tests", () => { +// const streamTypes = [ +// { +// name: "Readable", +// createStream: () => new Readable({ read() {} }), +// supported: true, +// streamType: Readable, +// }, +// { +// name: "ReadableStream", +// createStream: () => new ReadableStream(), +// supported: false, +// streamType: ReadableStream, +// }, +// { +// name: "Blob", +// createStream: () => new Blob(["test"]), +// supported: false, +// streamType: Blob, +// }, +// ]; + +// streamTypes.forEach(({ name, createStream, supported, streamType }) => { +// describe.skipIf(!supported)(`${name} streams`, () => { +// describe("joinStreams()", () => { +// it(`Should return single ${name} when only one stream is provided`, () => { +// const stream = createStream(); +// const result = joinStreams([stream as unknown as StreamingBlobPayloadOutputTypes]); + +// expect(result).toBeDefined(); +// expect(result).toBe(stream); +// }); +// it(`Should handle empty ${name} streams array`, () => { +// const result = 
joinStreams([] as unknown as StreamingBlobPayloadOutputTypes[]); +// expect(result).toBeDefined(); +// expect(result).toBeInstanceOf(streamType); +// }); +// it(`Should join multiple ${name} streams into a single stream`, async () => { +// const content1 = Buffer.from("Chunk 1"); +// const content2 = Buffer.from("Chunk 2"); +// const content3 = Buffer.from("Chunk 3"); + +// if (name === "Readable") { +// const stream1 = new Readable({ +// read() { +// this.push(content1); +// this.push(null); +// }, +// }); +// const stream2 = new Readable({ +// read() { +// this.push(content2); +// this.push(null); +// }, +// }); +// const stream3 = new Readable({ +// read() { +// this.push(content3); +// this.push(null); +// }, +// }); + +// const joinedStream = joinStreams([ +// stream1, +// stream2, +// stream3, +// ] as unknown as StreamingBlobPayloadOutputTypes[]); + +// const chunks: Buffer[] = []; +// for await (const chunk of joinedStream as any) { +// chunks.push(Buffer.from(chunk)); +// } + +// const joinedContent = Buffer.concat(chunks).toString(); +// expect(joinedContent).toContain(content1.toString()); +// expect(joinedContent).toContain(content2.toString()); +// expect(joinedContent).toContain(content3.toString()); +// } +// }); +// it(`Should handle ${name} streams with different chunk sizes`, async () => { +// const content1 = Buffer.from("Chunk 1 Chunk 1 Chunk 1"); +// const content2 = Buffer.from("Chunk 2"); +// const content3 = Buffer.from("Chunk 3 Chunk 3"); + +// if (name === "Readable") { +// const stream1 = new Readable({ +// read() { +// this.push(content1); +// this.push(null); +// }, +// }); +// const stream2 = new Readable({ +// read() { +// this.push(content2); +// this.push(null); +// }, +// }); +// const stream3 = new Readable({ +// read() { +// this.push(content3); +// this.push(null); +// }, +// }); + +// const joinedStream = joinStreams([ +// stream1, +// stream2, +// stream3, +// ] as unknown as StreamingBlobPayloadOutputTypes[]); + +// const 
chunks: Buffer[] = []; +// for await (const chunk of joinedStream as any) { +// chunks.push(Buffer.from(chunk)); +// } + +// const joinedContent = Buffer.concat(chunks).toString(); +// expect(joinedContent).toContain(content1.toString()); +// expect(joinedContent).toContain(content2.toString()); +// expect(joinedContent).toContain(content3.toString()); +// } +// }); +// it(`Should handle ${name} streams with no data`, async () => { +// if (name === "Readable") { +// const emptyStream1 = new Readable({ +// read() { +// this.push(null); +// }, +// }); +// const emptyStream2 = new Readable({ +// read() { +// this.push(null); +// }, +// }); + +// const joinedStream = joinStreams([ +// emptyStream1, +// emptyStream2, +// ] as unknown as StreamingBlobPayloadOutputTypes[]); + +// const chunks: Buffer[] = []; +// for await (const chunk of joinedStream as any) { +// chunks.push(Buffer.from(chunk)); +// } +// expect(chunks.length).toBe(0); +// expect(Buffer.concat(chunks).length).toBe(0); +// } +// }); +// it(`Should properly close/cleanup ${name} streams after processing`, async () => { +// if (name === "Readable") { +// const stream1 = new Readable({ +// read() { +// this.push(Buffer.from("data")); +// this.push(null); +// }, +// }); +// const stream2 = new Readable({ +// read() { +// this.push(Buffer.from("more")); +// this.push(null); +// }, +// }); + +// const destroySpy1 = vi.spyOn(stream1, "destroy"); +// const destroySpy2 = vi.spyOn(stream2, "destroy"); + +// const joinedStream = joinStreams([stream1, stream2] as unknown as StreamingBlobPayloadOutputTypes[]); + +// for await (const chunk of joinedStream as any) { +// // consume the data +// } + +// expect(destroySpy1).toHaveBeenCalled(); +// expect(destroySpy2).toHaveBeenCalled(); +// } +// }); +// }); + +// describe("iterateStreams()", () => { +// it(`Should iterate through single ${name} stream`, async () => { +// if (name === "Readable") { +// const stream1 = new Readable({ +// read() { +// 
this.push(Buffer.from("stream 1")); +// this.push(null); +// }, +// }); +// const iterator = iterateStreams([stream1] as unknown as StreamingBlobPayloadOutputTypes[]); + +// const chunks: string[] = []; +// for await (const chunk of iterator) { +// chunks.push(chunk.toString()); +// } +// expect(chunks).toEqual(["stream 1"]); +// } +// }); +// it(`Should iterate through multiple ${name} streams in order`, async () => { +// if (name === "Readable") { +// const stream1 = new Readable({ +// read() { +// this.push(Buffer.from("stream 1")); +// this.push(null); +// }, +// }); +// const stream2 = new Readable({ +// read() { +// this.push(Buffer.from("stream 2")); +// this.push(null); +// }, +// }); +// const stream3 = new Readable({ +// read() { +// this.push(Buffer.from("stream 3")); +// this.push(null); +// }, +// }); +// const iterator = iterateStreams([ +// stream1, +// stream2, +// stream3, +// ] as unknown as StreamingBlobPayloadOutputTypes[]); + +// const chunks: string[] = []; +// for await (const chunk of iterator) { +// chunks.push(chunk.toString()); +// } +// expect(chunks).toEqual(["stream 1", "stream 2", "stream 3"]); +// } +// }); +// it(`Should call onBytes callback during ${name} iteration`, () => {}); +// it(`Should call onCompletion callback after ${name} iteration completes`, () => {}); +// it(`Should call onFailure callback when ${name} iteration fails`, () => {}); +// it(`Should handle empty ${name} streams during iteration`, () => {}); +// it(`Should track correct byte count across ${name} streams`, () => {}); +// it(`Should maintain correct index during ${name} stream iteration`, () => {}); +// }); +// }); +// }); +// }); diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index f7f5282d4148..e2e3d13f238d 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -6,8 +6,7 @@ 
import type { } from "@aws-sdk/client-s3"; import { GetObjectCommand, HeadObjectCommand, S3Client } from "@aws-sdk/client-s3"; import { getChecksum } from "@aws-sdk/middleware-flexible-checksums/dist-types/getChecksum"; -import { ChecksumConstructor, StreamingBlobPayloadOutputTypes } from "@smithy/types"; -import { Checksum } from "@smithy/types"; +import { type StreamingBlobPayloadOutputTypes, Checksum, ChecksumConstructor } from "@smithy/types"; import type { AddEventListenerOptions, EventListener, RemoveEventListenerOptions } from "./event-listener-types"; import { joinStreams } from "./join-streams"; @@ -36,6 +35,7 @@ export class S3TransferManager implements IS3TransferManager { private readonly checksumAlgorithm: ChecksumAlgorithm; private readonly multipartDownloadType: "PART" | "RANGE"; private readonly eventListeners: TransferEventListeners; + private readonly abortCleanupFunctions = new WeakMap void>(); public constructor(config: S3TransferManagerConfig = {}) { this.checksumValidationEnabled = config.checksumValidationEnabled ?? 
true; @@ -84,16 +84,8 @@ export class S3TransferManager implements IS3TransferManager { callback: EventListener, options?: AddEventListenerOptions | boolean ): void; - public addEventListener( - type: string, - callback: EventListener | null, - options?: AddEventListenerOptions | boolean - ): void; - public addEventListener( - type: unknown, - callback: EventListener, - options?: AddEventListenerOptions | boolean - ): void { + public addEventListener(type: string, callback: EventListener, options?: AddEventListenerOptions | boolean): void; + public addEventListener(type: string, callback: EventListener, options?: AddEventListenerOptions | boolean): void { const eventType = type as keyof TransferEventListeners; const listeners = this.eventListeners[eventType]; @@ -101,12 +93,26 @@ export class S3TransferManager implements IS3TransferManager { throw new Error(`Unknown event type: ${eventType}`); } - // TODO: Add support for AbortSignal - const once = typeof options !== "boolean" && options?.once; + const signal = typeof options !== "boolean" ? 
options?.signal : undefined; let updatedCallback = callback; + + if (signal?.aborted) { + return; + } + + if (signal) { + const removeListenerAfterAbort = () => { + this.removeEventListener(eventType, updatedCallback); + this.abortCleanupFunctions.delete(signal); + }; + + this.abortCleanupFunctions.set(signal, () => signal.removeEventListener("abort", removeListenerAfterAbort)); + signal.addEventListener("abort", removeListenerAfterAbort, { once: true }); + } + if (once) { - updatedCallback = (event: any) => { + updatedCallback = (event: Event) => { if (typeof callback === "function") { callback(event); } else { @@ -115,29 +121,31 @@ export class S3TransferManager implements IS3TransferManager { this.removeEventListener(eventType, updatedCallback); }; } - - if (eventType === "transferInitiated" || eventType === "bytesTransferred" || eventType === "transferFailed") { - listeners.push(updatedCallback as EventListener); - } else if (eventType === "transferComplete") { - (listeners as EventListener[]).push( - updatedCallback as EventListener - ); - } + listeners.push(updatedCallback); } + /** + * todo: what does the return boolean mean? + * + * it returns false if the event is cancellable, and at least oneo the handlers which received event called + * Event.preventDefault(). Otherwise true. + * The use cases of preventDefault() does not apply to transfermanager but we should still keep the boolean + * and continue to return true to stay consistent with EventTarget. 
+ * + */ public dispatchEvent(event: Event & TransferEvent): boolean; public dispatchEvent(event: Event & TransferCompleteEvent): boolean; public dispatchEvent(event: Event): boolean; - public dispatchEvent(event: any): boolean { + public dispatchEvent(event: Event): boolean { const eventType = event.type; - const listeners = this.eventListeners[eventType as keyof TransferEventListeners]; + const listeners = this.eventListeners[eventType as keyof TransferEventListeners] as EventListener[]; if (listeners) { - for (const callback of listeners) { - if (typeof callback === "function") { - callback(event); + for (const listener of listeners) { + if (typeof listener === "function") { + listener(event); } else { - callback.handleEvent?.(event); + listener.handleEvent(event); } } } @@ -166,25 +174,29 @@ export class S3TransferManager implements IS3TransferManager { ): void; public removeEventListener( type: string, - callback: EventListener | null, + callback: EventListener, options?: RemoveEventListenerOptions | boolean ): void; - public removeEventListener(type: unknown, callback: unknown, options?: unknown): void { + public removeEventListener( + type: string, + callback: EventListener, + options?: RemoveEventListenerOptions | boolean + ): void { const eventType = type as keyof TransferEventListeners; const listeners = this.eventListeners[eventType]; if (listeners) { - if (eventType === "transferInitiated" || eventType === "bytesTransferred" || eventType === "transferFailed") { + if ( + eventType === "transferInitiated" || + eventType === "bytesTransferred" || + eventType === "transferFailed" || + eventType === "transferComplete" + ) { const eventListener = callback as EventListener; - const index = listeners.indexOf(eventListener); - if (index !== -1) { + let index = listeners.indexOf(eventListener); + while (index !== -1) { listeners.splice(index, 1); - } - } else if (eventType === "transferComplete") { - const eventListener = callback as EventListener; - const index 
= (listeners as EventListener[]).indexOf(eventListener); - if (index !== -1) { - (listeners as EventListener[]).splice(index, 1); + index = listeners.indexOf(eventListener); } } else { throw new Error(`Unknown event type: ${type}`); @@ -196,205 +208,117 @@ export class S3TransferManager implements IS3TransferManager { throw new Error("Method not implemented."); } + /** + * What is missing from the revised SEP and this implementation currently? + * PART mode: + * - Step 5: validate GetObject response for each part + * - If validation fails at any point, cancel all ongoing requests and error out + * - Step 6: after all requests have been sent, validate that the total number of part GET requests sent matches with the + * expected `PartsCount` + * - Step 7: when creating DownloadResponse, set accordingly: + * - (DONE) `ContentLength` : total length of the object saved from Step 3 + * - (DONE) `ContentRange`: based on `bytes 0-(ContentLength -1)/ContentLength` + * - If ChecksumType is `COMPOSITE`, set all checksum value members to null as + * the checksum value returned from a part GET request is not the composite + * checksum for the entire object + * RANGE mode: + * - Step 7: validate GetObject response for each part. If validation fails or a + * request fails at any point, cancel all ongoing requests and return an error to + * the user. + * - Step 8: after all requests have sent, validate that the total number of ranged + * GET requests sent matches with the expected number saved from Step 5. + * - Step 9: create DownloadResponse. 
Copy the fields in GetObject response from + * Step 3 and set the following fields accordingly: + * - (DONE) `ContentLength` : total length of the object saved from Step 3 + * - (DONE) `ContentRange`: based on `bytes 0-(ContentLength -1)/ContentLength` + * - If ChecksumType is `COMPOSITE`, set all checksum value members to null as + * the checksum value returned from a part GET request is not the composite + * checksum for the entire object + * Checksum validation notes: + * - + * + */ public async download(request: DownloadRequest, transferOptions?: TransferOptions): Promise { - const metadata = {} as Omit; - const streams = [] as StreamingBlobPayloadOutputTypes[]; - const requests = [] as GetObjectCommandInput[]; - const partNumber = request.PartNumber; - const range = request.Range; - let totalSize: number | undefined; - - if (transferOptions?.eventListeners) { - for await (const listeners of this.iterateListeners(transferOptions?.eventListeners)) { - for (const listener of listeners) { - this.addEventListener(listener.eventType, listener.callback as EventListener); - } - } - } - - // TODO: Ensure download operation is treated as single object download when partNumber is provided regardless of multipartDownloadType setting if (typeof partNumber === "number") { - const getObjectRequest = { - ...request, - PartNumber: partNumber, - }; - const getObject = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); - - this.dispatchEvent( - Object.assign(new Event("transferInitiated"), { - request, - snapshot: { - transferredBytes: 0, - totalBytes: getObject.ContentLength, - }, - }) + throw new Error( + "partNumber included: S3 Transfer Manager does not support downloads for specific parts. 
Use GetObjectCommand instead" ); + } - if (getObject.Body) { - streams.push(getObject.Body); - requests.push(getObjectRequest); - } - this.assignMetadata(metadata, getObject); - } else if (this.multipartDownloadType === "PART") { - if (range == null) { - const initialPartRequest = { - ...request, - PartNumber: 1, - }; - const initialPart = await this.s3ClientInstance.send(new GetObjectCommand(initialPartRequest), transferOptions); - const initialETag = initialPart.ETag ?? undefined; - totalSize = initialPart.ContentRange ? parseInt(initialPart.ContentRange.split("/")[1]) : undefined; - - this.dispatchTransferInitiatedEvent(request, totalSize); - if (initialPart.Body) { - streams.push(initialPart.Body); - requests.push(initialPartRequest); - } - this.assignMetadata(metadata, initialPart); - - if (initialPart.PartsCount! > 1) { - let previousPart = initialPart; - for (let part = 2; part <= initialPart.PartsCount!; part++) { - const getObjectRequest = { - ...request, - PartNumber: part, - IfMatch: !request.VersionId ? initialETag : undefined, - }; - const getObject = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); - - if (getObject.ContentRange && previousPart.ContentRange) { - this.validateExpectedRanges(previousPart.ContentRange, getObject.ContentRange, part); - } - - if (getObject.Body) { - streams.push(getObject.Body); - requests.push(getObjectRequest); - } - this.assignMetadata(metadata, getObject); - previousPart = getObject; - } - } - } else { - const getObjectRequest = { - ...request, - }; - const getObject = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); - totalSize = getObject.ContentRange ? 
parseInt(getObject.ContentRange.split("/")[1]) : undefined; - - this.dispatchTransferInitiatedEvent(request, totalSize); - if (getObject.Body) { - streams.push(getObject.Body); - requests.push(getObjectRequest); - } - this.assignMetadata(metadata, getObject); - } - } else if (this.multipartDownloadType === "RANGE") { - let initialETag = undefined; - let left = 0; - let right = S3TransferManager.MIN_PART_SIZE; - let maxRange = Infinity; - - if (range != null) { - const [userRangeLeft, userRangeRight] = range.replace("bytes=", "").split("-").map(Number); - - maxRange = userRangeRight; - left = userRangeLeft; - right = Math.min(userRangeRight, left + S3TransferManager.MIN_PART_SIZE); - } + const metadata = {} as Omit; + const streams = [] as Promise[]; + const requests = [] as GetObjectCommandInput[]; - let remainingLength = 1; - let transferInitiatedEventDispatched = false; - - // TODO: Validate ranges for if multipartDownloadType === "RANGE" - while (remainingLength > 0) { - const range = `bytes=${left}-${right}`; - const getObjectRequest: GetObjectCommandInput = { - ...request, - Range: range, - IfMatch: transferInitiatedEventDispatched && !request.VersionId ? initialETag : undefined, - }; - const getObject = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); - - if (!transferInitiatedEventDispatched) { - totalSize = getObject.ContentRange ? parseInt(getObject.ContentRange.split("/")[1]) : undefined; - - this.dispatchTransferInitiatedEvent(request, totalSize); - initialETag = getObject.ETag ?? 
undefined; - transferInitiatedEventDispatched = true; - } + let totalSize: number | undefined; - if (getObject.Body) { - streams.push(getObject.Body); - requests.push(getObjectRequest); + this.checkAborted(transferOptions); + this.addEventListeners(transferOptions?.eventListeners); - // TODO: - // after completing SEP requirements: - // - acquire lock on webstreams in the same - // - synchronous frame as they are opened or else - // - the connection might be closed too early. - if (typeof (getObject.Body as ReadableStream).getReader === "function") { - const reader = (getObject.Body as any).getReader(); - (getObject.Body as any).getReader = function () { - return reader; - }; - } - } - this.assignMetadata(metadata, getObject); + if (this.multipartDownloadType === "PART") { + const responseMetadata = await this.downloadByPart(request, transferOptions ?? {}, streams, requests, metadata); + totalSize = responseMetadata.totalSize; + } else if (this.multipartDownloadType === "RANGE") { + const responseMetadata = await this.downloadByRange(request, transferOptions ?? {}, streams, requests, metadata); + totalSize = responseMetadata.totalSize; + } - left = right + 1; - right = Math.min(left + S3TransferManager.MIN_PART_SIZE, maxRange); + const removeLocalEventListeners = () => { + this.removeEventListeners(transferOptions?.eventListeners); - remainingLength = Math.min( - right - left, - Math.max(0, (getObject.ContentLength ?? 0) - S3TransferManager.MIN_PART_SIZE) - ); + // remove any local abort() listeners after request completes. 
+ if (transferOptions?.abortSignal) { + this.abortCleanupFunctions.get(transferOptions.abortSignal as AbortSignal)?.(); + this.abortCleanupFunctions.delete(transferOptions.abortSignal as AbortSignal); } - } + }; - const responseBody = joinStreams(streams, { - onBytes: (byteLength: number, index) => { - this.dispatchEvent( - Object.assign(new Event("bytesTransferred"), { - request: requests[index], - snapshot: { - transferredBytes: byteLength, - totalBytes: totalSize, - }, - }) - ); - }, - onCompletion: (byteLength: number, index) => { - this.dispatchEvent( - Object.assign(new Event("transferComplete"), { - request: requests[index], - response: { - ...metadata, - Body: responseBody, - }, - snapshot: { - transferredBytes: byteLength, - totalBytes: totalSize, - }, - }) - ); - }, - onFailure: (error: unknown, index) => { - this.dispatchEvent( - Object.assign(new Event("transferFailed"), { - request: requests[index], - snapshot: { - transferredBytes: error, - totalBytes: totalSize, - }, - }) - ); - }, - }); + // TODO: + // after completing SEP requirements: + // - acquire lock on webstreams in the same + // - synchronous frame as they are opened or else + // - the connection might be closed too early. 
const response = { ...metadata, - Body: responseBody, + Body: await joinStreams(streams, { + onBytes: (byteLength: number, index) => { + this.dispatchEvent( + Object.assign(new Event("bytesTransferred"), { + request: requests[index], + snapshot: { + transferredBytes: byteLength, + totalBytes: totalSize, + }, + }) + ); + }, + onCompletion: (byteLength: number, index) => { + this.dispatchEvent( + Object.assign(new Event("transferComplete"), { + request: requests[index], + response, + snapshot: { + transferredBytes: byteLength, + totalBytes: totalSize, + }, + }) + ); + removeLocalEventListeners(); + }, + onFailure: (error: unknown, index) => { + this.dispatchEvent( + Object.assign(new Event("transferFailed"), { + request: requests[index], + snapshot: { + transferredBytes: error, + totalBytes: totalSize, + }, + }) + ); + removeLocalEventListeners(); + }, + }), }; return response; @@ -429,6 +353,200 @@ export class S3TransferManager implements IS3TransferManager { throw new Error("Method not implemented."); } + protected async downloadByPart( + request: DownloadRequest, + transferOptions: TransferOptions, + streams: Promise[], + requests: GetObjectCommandInput[], + metadata: Omit + ): Promise<{ totalSize: number | undefined }> { + let totalSize: number | undefined; + this.checkAborted(transferOptions); + + if (request.Range == null) { + const initialPartRequest = { + ...request, + PartNumber: 1, + }; + const initialPart = await this.s3ClientInstance.send(new GetObjectCommand(initialPartRequest), transferOptions); + const initialETag = initialPart.ETag ?? undefined; + totalSize = initialPart.ContentRange ? 
Number.parseInt(initialPart.ContentRange.split("/")[1]) : undefined; + + this.dispatchTransferInitiatedEvent(request, totalSize); + if (initialPart.Body) { + if (initialPart.Body && typeof (initialPart.Body as any).getReader === "function") { + const reader = (initialPart.Body as any).getReader(); + (initialPart.Body as any).getReader = function () { + return reader; + }; + } + streams.push(Promise.resolve(initialPart.Body)); + requests.push(initialPartRequest); + } + this.updateResponseLengthAndRange(initialPart, totalSize); + this.assignMetadata(metadata, initialPart); + + if (initialPart.PartsCount! > 1) { + for (let part = 2; part <= initialPart.PartsCount!; part++) { + this.checkAborted(transferOptions); + + const getObjectRequest = { + ...request, + PartNumber: part, + IfMatch: !request.VersionId ? initialETag : undefined, + }; + const getObject = this.s3ClientInstance + .send(new GetObjectCommand(getObjectRequest), transferOptions) + .then((response) => { + // this.validatePartRange(part, response.ContentRange, this.targetPartSizeBytes); + if (response.Body && typeof (response.Body as any).getReader === "function") { + const reader = (response.Body as any).getReader(); + (response.Body as any).getReader = function () { + return reader; + }; + } + return response.Body!; + }); + + streams.push(getObject); + requests.push(getObjectRequest); + } + } + } else { + this.checkAborted(transferOptions); + + const getObjectRequest = { + ...request, + }; + const getObject = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); + totalSize = getObject.ContentRange ? 
Number.parseInt(getObject.ContentRange.split("/")[1]) : undefined; + + this.dispatchTransferInitiatedEvent(request, totalSize); + if (getObject.Body) { + streams.push(Promise.resolve(getObject.Body)); + requests.push(getObjectRequest); + } + this.updateResponseLengthAndRange(getObject, totalSize); + this.assignMetadata(metadata, getObject); + } + + return { + totalSize, + }; + } + + protected async downloadByRange( + request: DownloadRequest, + transferOptions: TransferOptions, + streams: Promise[], + requests: GetObjectCommandInput[], + metadata: Omit + ): Promise<{ totalSize: number | undefined }> { + this.checkAborted(transferOptions); + + let left = 0; + let right = this.targetPartSizeBytes - 1; + let maxRange = Number.POSITIVE_INFINITY; + + if (request.Range != null) { + const [userRangeLeft, userRangeRight] = request.Range.replace("bytes=", "").split("-").map(Number); + + maxRange = userRangeRight; + left = userRangeLeft; + right = Math.min(userRangeRight, left + S3TransferManager.MIN_PART_SIZE - 1); + } + + let remainingLength = 1; + const getObjectRequest: GetObjectCommandInput = { + ...request, + Range: `bytes=${left}-${right}`, + }; + const initialRangeGet = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); + const initialETag = initialRangeGet.ETag ?? undefined; + const totalSize = initialRangeGet.ContentRange + ? 
Number.parseInt(initialRangeGet.ContentRange.split("/")[1]) + : undefined; + if (initialRangeGet.Body && typeof (initialRangeGet.Body as any).getReader === "function") { + const reader = (initialRangeGet.Body as any).getReader(); + (initialRangeGet.Body as any).getReader = function () { + return reader; + }; + } + + this.dispatchTransferInitiatedEvent(request, totalSize); + + streams.push(Promise.resolve(initialRangeGet.Body!)); + requests.push(getObjectRequest); + this.updateResponseLengthAndRange(initialRangeGet, totalSize); + this.assignMetadata(metadata, initialRangeGet); + + left = right + 1; + right = Math.min(left + S3TransferManager.MIN_PART_SIZE - 1, maxRange); + remainingLength = totalSize ? Math.min(right - left + 1, Math.max(0, totalSize - left)) : 0; + + while (remainingLength > 0) { + this.checkAborted(transferOptions); + + const range = `bytes=${left}-${right}`; + const getObjectRequest: GetObjectCommandInput = { + ...request, + Range: range, + IfMatch: !request.VersionId ? initialETag : undefined, + }; + const getObject = this.s3ClientInstance + .send(new GetObjectCommand(getObjectRequest), transferOptions) + .then((response) => { + if (response.Body && typeof (response.Body as any).getReader === "function") { + const reader = (response.Body as any).getReader(); + (response.Body as any).getReader = function () { + return reader; + }; + } + return response.Body!; + }); + + streams.push(getObject); + requests.push(getObjectRequest); + + left = right + 1; + right = Math.min(left + S3TransferManager.MIN_PART_SIZE - 1, maxRange); + remainingLength = totalSize ? 
Math.min(right - left + 1, Math.max(0, totalSize - left)) : 0; + } + + return { + totalSize, + }; + } + + private addEventListeners(eventListeners?: TransferEventListeners): void { + for (const listeners of this.iterateListeners(eventListeners)) { + for (const listener of listeners) { + this.addEventListener(listener.eventType, listener.callback as EventListener); + } + } + } + + private removeEventListeners(eventListeners?: TransferEventListeners): void { + for (const listeners of this.iterateListeners(eventListeners)) { + for (const listener of listeners) { + this.removeEventListener(listener.eventType, listener.callback as EventListener); + } + } + } + + private updateResponseLengthAndRange(response: DownloadResponse, totalSize: number | undefined): void { + if (totalSize !== undefined) { + response.ContentLength = totalSize; + response.ContentRange = `bytes 0-${totalSize - 1}/${totalSize}`; + } + } + + private checkAborted(transferOptions?: TransferOptions): void { + if (transferOptions?.abortSignal?.aborted) { + throw Object.assign(new Error("Download aborted."), { name: "AbortError" }); + } + } + private assignMetadata(container: any, response: any) { for (const key in response) { if (key === "Body") { @@ -457,29 +575,11 @@ export class S3TransferManager implements IS3TransferManager { return true; } - /** - * For debugging purposes - * - * @internal - */ - private logCallbackCount(type: unknown): void { - const eventType = type as keyof TransferEventListeners; - const listeners = this.eventListeners[eventType]; - - console.log(`Callback count for ${eventType}: `); - let count = 0; - if (listeners) { - for (const callbacks of listeners) { - count++; - } - } - console.log(count); - } - - private async *iterateListeners(eventListeners: TransferEventListeners) { - for (const eventType in eventListeners) { + private *iterateListeners(eventListeners: TransferEventListeners = {}) { + for (const key in eventListeners) { + const eventType = key as keyof 
TransferEventListeners; const listeners = eventListeners[eventType as keyof TransferEventListeners]; - if (listeners) { + if (Array.isArray(listeners)) { for (const callback of listeners) { yield [ { @@ -492,43 +592,34 @@ export class S3TransferManager implements IS3TransferManager { } } - private validateExpectedRanges(previousPart: string, currentPart: string, partNum: number) { - const parseContentRange = (range: string) => { - const match = range.match(/bytes (\d+)-(\d+)\/(\d+)/); - if (!match) throw new Error(`Invalid ContentRange format: ${range}`); - return { - start: parseInt(match[1]), - end: parseInt(match[2]), - total: parseInt(match[3]), - }; - }; + private validatePartRange(partNumber: number, contentRange: string | undefined, partSize: number) { + if (!contentRange) return; - // TODO: throw error for incomplete download. - // Ex: final part and 8 bytes short should throw error -> "bytes 10485760-13631480/13631488" + const match = contentRange.match(/bytes (\d+)-(\d+)\/(\d+)/); + if (!match) throw new Error(`Invalid ContentRange format: ${contentRange}`); - try { - const previous = parseContentRange(previousPart); - const current = parseContentRange(currentPart); + const start = Number.parseInt(match[1]); + const end = Number.parseInt(match[2]); + const total = Number.parseInt(match[3]); - const expectedStart = previous.end + 1; - const prevPartSize = previous.end - previous.start + 1; - const currPartSize = current.end - current.start + 1; + const expectedStart = (partNumber - 1) * partSize; + const expectedEnd = Math.min(expectedStart + partSize - 1, total - 1); - if (current.start !== expectedStart) { - throw new Error(`Expected part ${partNum} to start at ${expectedStart} but got ${current.start}`); - } + // console.log({ + // partNumber, + // start, + // end, + // total, + // expectedStart, + // expectedEnd + // }); - // console.log(currPartSize < prevPartSize); - // console.log(current.end !== current.total - 1); - if (currPartSize < prevPartSize 
&& current.end !== current.total - 1) { - throw new Error( - `Final part did not cover total range of ${current.total}. Expected range of bytes ${current.start}-${ - currPartSize - 1 - }` - ); - } - } catch (error) { - throw new Error(`Range validation failed: ${error.message}`); + if (start !== expectedStart) { + throw new Error(`Expected part ${partNumber} to start at ${expectedStart} but got ${start}`); + } + + if (end !== expectedEnd) { + throw new Error(`Expected part ${partNumber} to end at ${expectedEnd} but got ${end}`); } } } diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts index 5b2b13d3d8b4..81ecba3aca29 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts @@ -1,17 +1,16 @@ import { StreamingBlobPayloadOutputTypes } from "@smithy/types"; import { isBlob, isReadableStream, sdkStreamMixin } from "@smithy/util-stream"; -// check all types. 
needs to join nodejs and browser together -export function joinStreams(streams: StreamingBlobPayloadOutputTypes[]): StreamingBlobPayloadOutputTypes { - console.log("Is Readable Stream: "); - console.log(isReadableStream(streams[0])); +import { JoinStreamIterationEvents } from "./types"; - if (streams.length === 1) { - return streams[0]; - } else if (isReadableStream(streams[0]) || isBlob(streams[0])) { +export async function joinStreams( + streams: Promise[], + eventListeners?: JoinStreamIterationEvents +): Promise { + if (isReadableStream(streams[0])) { const newReadableStream = new ReadableStream({ async start(controller) { - for await (const chunk of iterateStreams(streams)) { + for await (const chunk of iterateStreams(streams, eventListeners)) { controller.enqueue(chunk); } controller.close(); @@ -19,25 +18,36 @@ export function joinStreams(streams: StreamingBlobPayloadOutputTypes[]): Streami }); return sdkStreamMixin(newReadableStream); } else { - throw new Error("Unknown stream type"); + throw new Error("Unsupported Stream Type"); } } export async function* iterateStreams( - streams: StreamingBlobPayloadOutputTypes[] + streams: Promise[], + eventListeners?: JoinStreamIterationEvents ): AsyncIterable { - for (const stream of streams) { + let bytesTransferred = 0; + let index = 0; + for (const streamPromise of streams) { + const stream = await streamPromise; if (isReadableStream(stream)) { - const reader = (stream as ReadableStream).getReader(); - try { - while (true) { - const { done, value } = await reader.read(); - if (done) break; - yield value; + const reader = stream.getReader(); + while (true) { + const { done, value } = await reader.read(); + if (done) { + break; } - } finally { - reader.releaseLock(); + yield value; + bytesTransferred += value.byteLength; + eventListeners?.onBytes?.(bytesTransferred, index); } + reader.releaseLock(); + } else { + const failure = new Error(`unhandled stream type ${(stream as any)?.constructor?.name}`); + 
eventListeners?.onFailure?.(failure, index); + throw failure; } + index++; } + eventListeners?.onCompletion?.(bytesTransferred, index - 1); } diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts index 574c99aba51f..d99bb94561c5 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts @@ -1,17 +1,15 @@ import { StreamingBlobPayloadOutputTypes } from "@smithy/types"; -import { isBlob, isReadableStream, sdkStreamMixin } from "@smithy/util-stream"; +import { isReadableStream, sdkStreamMixin } from "@smithy/util-stream"; import { Readable } from "stream"; import { JoinStreamIterationEvents } from "./types"; // TODO: check all types. needs to join nodejs and browser together -export function joinStreams( - streams: StreamingBlobPayloadOutputTypes[], +export async function joinStreams( + streams: Promise[], eventListeners?: JoinStreamIterationEvents -): StreamingBlobPayloadOutputTypes { - if (streams.length === 1) { - return streams[0]; - } else if (isReadableStream(streams[0])) { +): Promise { + if (isReadableStream(streams[0])) { const newReadableStream = new ReadableStream({ async start(controller) { for await (const chunk of iterateStreams(streams, eventListeners)) { @@ -21,43 +19,36 @@ export function joinStreams( }, }); return sdkStreamMixin(newReadableStream); - } else if (isBlob(streams[0])) { - throw new Error("Blob not supported yet"); } else { return sdkStreamMixin(Readable.from(iterateStreams(streams, eventListeners))); } } export async function* iterateStreams( - streams: StreamingBlobPayloadOutputTypes[], + streams: Promise[], eventListeners?: JoinStreamIterationEvents ): AsyncIterable { let bytesTransferred = 0; let index = 0; - for (const stream of streams) { + for (const streamPromise of streams) { + const stream = await streamPromise; if (isReadableStream(stream)) { - // const reader = stream.getReader(); - 
// while (true) { - // const { done, value } = await reader.read(); - // if (done) { - // break; - // } - // yield value; - // bytesTransferred += value.byteLength; - // } - // reader.releaseLock(); - - const failure = new Error(`ReadableStreams not supported yet ${(stream as any)?.constructor?.name}`); - eventListeners?.onFailure?.(failure, index); - throw failure; - } else if (isBlob(stream)) { - throw new Error("Blob not supported yet"); + const reader = stream.getReader(); + while (true) { + const { done, value } = await reader.read(); + if (done) { + break; + } + yield value; + bytesTransferred += value.byteLength; + eventListeners?.onBytes?.(bytesTransferred, index); + } + reader.releaseLock(); } else if (stream instanceof Readable) { for await (const chunk of stream) { yield chunk; const chunkSize = Buffer.isBuffer(chunk) ? chunk.length : Buffer.byteLength(chunk); bytesTransferred += chunkSize; - eventListeners?.onBytes?.(bytesTransferred, index); } } else { diff --git a/lib/lib-storage/src/s3-transfer-manager/types.ts b/lib/lib-storage/src/s3-transfer-manager/types.ts index 6b6e81f6d6b1..69801f475970 100644 --- a/lib/lib-storage/src/s3-transfer-manager/types.ts +++ b/lib/lib-storage/src/s3-transfer-manager/types.ts @@ -216,14 +216,14 @@ export interface IS3TransferManager { callback: EventListener, options?: AddEventListenerOptions | boolean ): void; - addEventListener(type: string, callback: EventListener | null, options?: AddEventListenerOptions | boolean): void; + addEventListener(type: string, callback: EventListener, options?: AddEventListenerOptions | boolean): void; /** * Dispatches an event to the registered event listeners. * Triggers callbacks registered via addEventListener with matching event types. * * @param event - The event object to dispatch. 
- * @returns whether the event dispatched successfully + * @returns whether the event ran to completion * * @public */ @@ -261,11 +261,7 @@ export interface IS3TransferManager { callback: EventListener, options?: RemoveEventListenerOptions | boolean ): void; - removeEventListener( - type: string, - callback: EventListener | null, - options?: RemoveEventListenerOptions | boolean - ): void; + removeEventListener(type: string, callback: EventListener, options?: RemoveEventListenerOptions | boolean): void; } /** From 193825160fbbf9a40dd48608b1d32a5e40207ecf Mon Sep 17 00:00:00 2001 From: George Fu Date: Mon, 21 Jul 2025 15:32:13 -0400 Subject: [PATCH 03/30] feat(lib-storage): spec updates --- .../S3TransferManager.e2e.spec.ts | 2 +- .../s3-transfer-manager/S3TransferManager.ts | 114 +++++++++++++----- 2 files changed, 86 insertions(+), 30 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index 707391bfb0d2..49bc9f5956a7 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -111,7 +111,7 @@ describe(S3TransferManager.name, () => { } }); - describe.skip("(SEP) download single object tests", () => { + describe("(SEP) download single object tests", () => { async function sepTests( objectType: "single" | "multipart", multipartType: "PART" | "RANGE", diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index e2e3d13f238d..17b6c5c0a502 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -211,29 +211,29 @@ export class S3TransferManager implements IS3TransferManager { /** * What is missing from the revised SEP and this implementation currently? 
* PART mode: - * - Step 5: validate GetObject response for each part - * - If validation fails at any point, cancel all ongoing requests and error out + * - (DONE) Step 5: validate GetObject response for each part + * - If validation fails at any point, cancel all ongoing requests and error out * - Step 6: after all requests have been sent, validate that the total number of part GET requests sent matches with the * expected `PartsCount` * - Step 7: when creating DownloadResponse, set accordingly: - * - (DONE) `ContentLength` : total length of the object saved from Step 3 - * - (DONE) `ContentRange`: based on `bytes 0-(ContentLength -1)/ContentLength` - * - If ChecksumType is `COMPOSITE`, set all checksum value members to null as - * the checksum value returned from a part GET request is not the composite - * checksum for the entire object + * - (DONE) `ContentLength` : total length of the object saved from Step 3 + * - (DONE) `ContentRange`: based on `bytes 0-(ContentLength -1)/ContentLength` + * - If ChecksumType is `COMPOSITE`, set all checksum value members to null as + * the checksum value returned from a part GET request is not the composite + * checksum for the entire object * RANGE mode: - * - Step 7: validate GetObject response for each part. If validation fails or a + * - (DONE) Step 7: validate GetObject response for each part. If validation fails or a * request fails at any point, cancel all ongoing requests and return an error to * the user. * - Step 8: after all requests have sent, validate that the total number of ranged * GET requests sent matches with the expected number saved from Step 5. * - Step 9: create DownloadResponse. 
Copy the fields in GetObject response from * Step 3 and set the following fields accordingly: - * - (DONE) `ContentLength` : total length of the object saved from Step 3 - * - (DONE) `ContentRange`: based on `bytes 0-(ContentLength -1)/ContentLength` - * - If ChecksumType is `COMPOSITE`, set all checksum value members to null as - * the checksum value returned from a part GET request is not the composite - * checksum for the entire object + * - (DONE) `ContentLength` : total length of the object saved from Step 3 + * - (DONE) `ContentRange`: based on `bytes 0-(ContentLength -1)/ContentLength` + * - If ChecksumType is `COMPOSITE`, set all checksum value members to null as + * the checksum value returned from a part GET request is not the composite + * checksum for the entire object * Checksum validation notes: * - * @@ -370,8 +370,8 @@ export class S3TransferManager implements IS3TransferManager { }; const initialPart = await this.s3ClientInstance.send(new GetObjectCommand(initialPartRequest), transferOptions); const initialETag = initialPart.ETag ?? undefined; + const partSize = initialPart.ContentLength; totalSize = initialPart.ContentRange ? Number.parseInt(initialPart.ContentRange.split("/")[1]) : undefined; - this.dispatchTransferInitiatedEvent(request, totalSize); if (initialPart.Body) { if (initialPart.Body && typeof (initialPart.Body as any).getReader === "function") { @@ -383,9 +383,12 @@ export class S3TransferManager implements IS3TransferManager { streams.push(Promise.resolve(initialPart.Body)); requests.push(initialPartRequest); } + this.updateResponseLengthAndRange(initialPart, totalSize); this.assignMetadata(metadata, initialPart); + this.updateChecksumValues(initialPart, metadata); + let partCount = 1; if (initialPart.PartsCount! 
> 1) { for (let part = 2; part <= initialPart.PartsCount!; part++) { this.checkAborted(transferOptions); @@ -398,7 +401,7 @@ export class S3TransferManager implements IS3TransferManager { const getObject = this.s3ClientInstance .send(new GetObjectCommand(getObjectRequest), transferOptions) .then((response) => { - // this.validatePartRange(part, response.ContentRange, this.targetPartSizeBytes); + this.validatePartDownload(part, response.ContentRange, partSize ?? 0); if (response.Body && typeof (response.Body as any).getReader === "function") { const reader = (response.Body as any).getReader(); (response.Body as any).getReader = function () { @@ -410,8 +413,15 @@ export class S3TransferManager implements IS3TransferManager { streams.push(getObject); requests.push(getObjectRequest); + partCount++; } } + + if (partCount !== initialPart.PartsCount) { + throw new Error( + `The number of parts downloaded (${partCount}) does not match the expected number (${initialPart.PartsCount})` + ); + } } else { this.checkAborted(transferOptions); @@ -428,6 +438,7 @@ export class S3TransferManager implements IS3TransferManager { } this.updateResponseLengthAndRange(getObject, totalSize); this.assignMetadata(metadata, getObject); + this.updateChecksumValues(getObject, metadata); } return { @@ -447,6 +458,7 @@ export class S3TransferManager implements IS3TransferManager { let left = 0; let right = this.targetPartSizeBytes - 1; let maxRange = Number.POSITIVE_INFINITY; + let remainingLength = 1; if (request.Range != null) { const [userRangeLeft, userRangeRight] = request.Range.replace("bytes=", "").split("-").map(Number); @@ -455,17 +467,25 @@ export class S3TransferManager implements IS3TransferManager { left = userRangeLeft; right = Math.min(userRangeRight, left + S3TransferManager.MIN_PART_SIZE - 1); } - - let remainingLength = 1; const getObjectRequest: GetObjectCommandInput = { ...request, Range: `bytes=${left}-${right}`, }; const initialRangeGet = await this.s3ClientInstance.send(new 
GetObjectCommand(getObjectRequest), transferOptions); + this.validateRangeDownload(`bytes=${left}-${right}`, initialRangeGet.ContentRange); const initialETag = initialRangeGet.ETag ?? undefined; const totalSize = initialRangeGet.ContentRange ? Number.parseInt(initialRangeGet.ContentRange.split("/")[1]) : undefined; + + let expectedRequestCount = 1; + if (totalSize) { + const contentLength = totalSize; + const remainingBytes = Math.max(0, contentLength - (right - left + 1)); + const additionalRequests = Math.ceil(remainingBytes / S3TransferManager.MIN_PART_SIZE); + expectedRequestCount += additionalRequests; + } + if (initialRangeGet.Body && typeof (initialRangeGet.Body as any).getReader === "function") { const reader = (initialRangeGet.Body as any).getReader(); (initialRangeGet.Body as any).getReader = function () { @@ -474,16 +494,17 @@ export class S3TransferManager implements IS3TransferManager { } this.dispatchTransferInitiatedEvent(request, totalSize); - streams.push(Promise.resolve(initialRangeGet.Body!)); requests.push(getObjectRequest); + this.updateResponseLengthAndRange(initialRangeGet, totalSize); this.assignMetadata(metadata, initialRangeGet); + this.updateChecksumValues(initialRangeGet, metadata); left = right + 1; right = Math.min(left + S3TransferManager.MIN_PART_SIZE - 1, maxRange); remainingLength = totalSize ? 
Math.min(right - left + 1, Math.max(0, totalSize - left)) : 0; - + let actualRequestCount = 1; while (remainingLength > 0) { this.checkAborted(transferOptions); @@ -496,6 +517,7 @@ export class S3TransferManager implements IS3TransferManager { const getObject = this.s3ClientInstance .send(new GetObjectCommand(getObjectRequest), transferOptions) .then((response) => { + this.validateRangeDownload(range, response.ContentRange); if (response.Body && typeof (response.Body as any).getReader === "function") { const reader = (response.Body as any).getReader(); (response.Body as any).getReader = function () { @@ -507,12 +529,19 @@ export class S3TransferManager implements IS3TransferManager { streams.push(getObject); requests.push(getObjectRequest); + actualRequestCount++; left = right + 1; right = Math.min(left + S3TransferManager.MIN_PART_SIZE - 1, maxRange); remainingLength = totalSize ? Math.min(right - left + 1, Math.max(0, totalSize - left)) : 0; } + if (expectedRequestCount !== actualRequestCount) { + throw new Error( + `The number of ranged GET requests sent (${actualRequestCount}) does not match the expected number (${expectedRequestCount})` + ); + } + return { totalSize, }; @@ -541,6 +570,15 @@ export class S3TransferManager implements IS3TransferManager { } } + private updateChecksumValues(initialPart: DownloadResponse, metadata: Omit) { + if (initialPart.ChecksumType === "COMPOSITE") { + metadata.ChecksumCRC32 = undefined; + metadata.ChecksumCRC32C = undefined; + metadata.ChecksumSHA1 = undefined; + metadata.ChecksumSHA256 = undefined; + } + } + private checkAborted(transferOptions?: TransferOptions): void { if (transferOptions?.abortSignal?.aborted) { throw Object.assign(new Error("Download aborted."), { name: "AbortError" }); @@ -592,7 +630,7 @@ export class S3TransferManager implements IS3TransferManager { } } - private validatePartRange(partNumber: number, contentRange: string | undefined, partSize: number) { + private validatePartDownload(partNumber: 
number, contentRange: string | undefined, partSize: number) { if (!contentRange) return; const match = contentRange.match(/bytes (\d+)-(\d+)\/(\d+)/); @@ -605,15 +643,6 @@ export class S3TransferManager implements IS3TransferManager { const expectedStart = (partNumber - 1) * partSize; const expectedEnd = Math.min(expectedStart + partSize - 1, total - 1); - // console.log({ - // partNumber, - // start, - // end, - // total, - // expectedStart, - // expectedEnd - // }); - if (start !== expectedStart) { throw new Error(`Expected part ${partNumber} to start at ${expectedStart} but got ${start}`); } @@ -622,4 +651,31 @@ export class S3TransferManager implements IS3TransferManager { throw new Error(`Expected part ${partNumber} to end at ${expectedEnd} but got ${end}`); } } + + private validateRangeDownload(requestRange: string, responseRange: string | undefined) { + if (!responseRange) return; + + const match = responseRange.match(/bytes (\d+)-(\d+)\/(\d+)/); + if (!match) throw new Error(`Invalid ContentRange format: ${responseRange}`); + + const start = Number.parseInt(match[1]); + const end = Number.parseInt(match[2]); + const total = Number.parseInt(match[3]); + + const rangeMatch = requestRange.match(/bytes=(\d+)-(\d+)/); + if (!rangeMatch) throw new Error(`Invalid Range format: ${requestRange}`); + + const expectedStart = Number.parseInt(rangeMatch[1]); + const expectedEnd = Number.parseInt(rangeMatch[2]); + + if (start !== expectedStart) { + throw new Error(`Expected range to start at ${expectedStart} but got ${start}`); + } + + const isFinalPart = end + 1 === total; + + if (end !== expectedEnd && !(isFinalPart && end < expectedEnd)) { + throw new Error(`Expected range to end at ${expectedEnd} but got ${end}`); + } + } } From 879000e4439d4040ebc19b23ea258d2d17729481 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Mon, 21 Jul 2025 21:29:52 +0000 Subject: [PATCH 04/30] feat: addressed pr comments --- .../S3TransferManager.e2e.spec.ts | 5 - 
.../S3TransferManager.spec.ts | 447 ++++++++---------- .../join-streams.browser.ts | 22 +- .../src/s3-transfer-manager/join-streams.ts | 22 +- 4 files changed, 226 insertions(+), 270 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index 49bc9f5956a7..879fd2f6e9e0 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -31,7 +31,6 @@ describe(S3TransferManager.name, () => { let region: string; beforeAll(async () => { - // TODO: replace hard coded region and bucket with integration test resources. const integTestResourcesEnv = await getIntegTestResources(); Object.assign(process.env, integTestResourcesEnv); @@ -39,9 +38,6 @@ describe(S3TransferManager.name, () => { Bucket = process?.env?.AWS_SMOKE_TEST_BUCKET as string; void getIntegTestResources; - // region = "us-west-2"; - // Bucket = "lukachad-us-west-2"; - client = new S3({ region, }); @@ -96,7 +92,6 @@ describe(S3TransferManager.name, () => { bytesTransferred: [ ({ request, snapshot }) => { bytesTransferred = snapshot.transferredBytes; - // console.log(bytesTransferred); }, ], }, diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts index 139b0dceadc4..0fa0eb9322c7 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts @@ -568,7 +568,7 @@ describe("S3TransferManager Unit Tests", () => { }); }); - describe("validatePartRange()", () => { + describe("validatePartDownload()", () => { let tm: any; beforeAll(async () => { tm = new S3TransferManager() as any; @@ -584,7 +584,7 @@ describe("S3TransferManager Unit Tests", () => { for (const { partNumber, range } of ranges) { expect(() => { - 
tm.validatePartRange(partNumber, range, partSize); + tm.validatePartDownload(partNumber, range, partSize); }).not.toThrow(); } }); @@ -593,15 +593,15 @@ describe("S3TransferManager Unit Tests", () => { const partSize = 5242880; expect(() => { - tm.validatePartRange(2, "bytes 5242881-10485759/13631488", partSize); + tm.validatePartDownload(2, "bytes 5242881-10485759/13631488", partSize); }).toThrow("Expected part 2 to start at 5242880 but got 5242881"); expect(() => { - tm.validatePartRange(2, "bytes 5242879-10485759/13631488", partSize); + tm.validatePartDownload(2, "bytes 5242879-10485759/13631488", partSize); }).toThrow("Expected part 2 to start at 5242880 but got 5242879"); expect(() => { - tm.validatePartRange(2, "bytes 0-5242879/13631488", partSize); + tm.validatePartDownload(2, "bytes 0-5242879/13631488", partSize); }).toThrow("Expected part 2 to start at 5242880 but got 0"); }); @@ -609,11 +609,11 @@ describe("S3TransferManager Unit Tests", () => { const partSize = 5242880; expect(() => { - tm.validatePartRange(2, "bytes 5242880-10485760/13631488", partSize); + tm.validatePartDownload(2, "bytes 5242880-10485760/13631488", partSize); }).toThrow("Expected part 2 to end at 10485759 but got 10485760"); expect(() => { - tm.validatePartRange(3, "bytes 10485760-13631480/13631488", partSize); + tm.validatePartDownload(3, "bytes 10485760-13631480/13631488", partSize); }).toThrow("Expected part 3 to end at 13631487 but got 13631480"); }); @@ -621,7 +621,7 @@ describe("S3TransferManager Unit Tests", () => { const partSize = 5242880; expect(() => { - tm.validatePartRange(3, "bytes 10485760-13631487/13631488", partSize); + tm.validatePartDownload(3, "bytes 10485760-13631487/13631488", partSize); }).not.toThrow(); }); @@ -629,247 +629,200 @@ describe("S3TransferManager Unit Tests", () => { const partSize = 5242880; expect(() => { - tm.validatePartRange(2, "invalid-format", partSize); + tm.validatePartDownload(2, "invalid-format", partSize); }).toThrow("Invalid 
ContentRange format: invalid-format"); }); }); + + // TODO: tests cases for validateRangeDownload() }); -// describe("join-streams tests", () => { -// const streamTypes = [ -// { -// name: "Readable", -// createStream: () => new Readable({ read() {} }), -// supported: true, -// streamType: Readable, -// }, -// { -// name: "ReadableStream", -// createStream: () => new ReadableStream(), -// supported: false, -// streamType: ReadableStream, -// }, -// { -// name: "Blob", -// createStream: () => new Blob(["test"]), -// supported: false, -// streamType: Blob, -// }, -// ]; - -// streamTypes.forEach(({ name, createStream, supported, streamType }) => { -// describe.skipIf(!supported)(`${name} streams`, () => { -// describe("joinStreams()", () => { -// it(`Should return single ${name} when only one stream is provided`, () => { -// const stream = createStream(); -// const result = joinStreams([stream as unknown as StreamingBlobPayloadOutputTypes]); - -// expect(result).toBeDefined(); -// expect(result).toBe(stream); -// }); -// it(`Should handle empty ${name} streams array`, () => { -// const result = joinStreams([] as unknown as StreamingBlobPayloadOutputTypes[]); -// expect(result).toBeDefined(); -// expect(result).toBeInstanceOf(streamType); -// }); -// it(`Should join multiple ${name} streams into a single stream`, async () => { -// const content1 = Buffer.from("Chunk 1"); -// const content2 = Buffer.from("Chunk 2"); -// const content3 = Buffer.from("Chunk 3"); - -// if (name === "Readable") { -// const stream1 = new Readable({ -// read() { -// this.push(content1); -// this.push(null); -// }, -// }); -// const stream2 = new Readable({ -// read() { -// this.push(content2); -// this.push(null); -// }, -// }); -// const stream3 = new Readable({ -// read() { -// this.push(content3); -// this.push(null); -// }, -// }); - -// const joinedStream = joinStreams([ -// stream1, -// stream2, -// stream3, -// ] as unknown as StreamingBlobPayloadOutputTypes[]); - -// const chunks: 
Buffer[] = []; -// for await (const chunk of joinedStream as any) { -// chunks.push(Buffer.from(chunk)); -// } - -// const joinedContent = Buffer.concat(chunks).toString(); -// expect(joinedContent).toContain(content1.toString()); -// expect(joinedContent).toContain(content2.toString()); -// expect(joinedContent).toContain(content3.toString()); -// } -// }); -// it(`Should handle ${name} streams with different chunk sizes`, async () => { -// const content1 = Buffer.from("Chunk 1 Chunk 1 Chunk 1"); -// const content2 = Buffer.from("Chunk 2"); -// const content3 = Buffer.from("Chunk 3 Chunk 3"); - -// if (name === "Readable") { -// const stream1 = new Readable({ -// read() { -// this.push(content1); -// this.push(null); -// }, -// }); -// const stream2 = new Readable({ -// read() { -// this.push(content2); -// this.push(null); -// }, -// }); -// const stream3 = new Readable({ -// read() { -// this.push(content3); -// this.push(null); -// }, -// }); - -// const joinedStream = joinStreams([ -// stream1, -// stream2, -// stream3, -// ] as unknown as StreamingBlobPayloadOutputTypes[]); - -// const chunks: Buffer[] = []; -// for await (const chunk of joinedStream as any) { -// chunks.push(Buffer.from(chunk)); -// } - -// const joinedContent = Buffer.concat(chunks).toString(); -// expect(joinedContent).toContain(content1.toString()); -// expect(joinedContent).toContain(content2.toString()); -// expect(joinedContent).toContain(content3.toString()); -// } -// }); -// it(`Should handle ${name} streams with no data`, async () => { -// if (name === "Readable") { -// const emptyStream1 = new Readable({ -// read() { -// this.push(null); -// }, -// }); -// const emptyStream2 = new Readable({ -// read() { -// this.push(null); -// }, -// }); - -// const joinedStream = joinStreams([ -// emptyStream1, -// emptyStream2, -// ] as unknown as StreamingBlobPayloadOutputTypes[]); - -// const chunks: Buffer[] = []; -// for await (const chunk of joinedStream as any) { -// 
chunks.push(Buffer.from(chunk)); -// } -// expect(chunks.length).toBe(0); -// expect(Buffer.concat(chunks).length).toBe(0); -// } -// }); -// it(`Should properly close/cleanup ${name} streams after processing`, async () => { -// if (name === "Readable") { -// const stream1 = new Readable({ -// read() { -// this.push(Buffer.from("data")); -// this.push(null); -// }, -// }); -// const stream2 = new Readable({ -// read() { -// this.push(Buffer.from("more")); -// this.push(null); -// }, -// }); - -// const destroySpy1 = vi.spyOn(stream1, "destroy"); -// const destroySpy2 = vi.spyOn(stream2, "destroy"); - -// const joinedStream = joinStreams([stream1, stream2] as unknown as StreamingBlobPayloadOutputTypes[]); - -// for await (const chunk of joinedStream as any) { -// // consume the data -// } - -// expect(destroySpy1).toHaveBeenCalled(); -// expect(destroySpy2).toHaveBeenCalled(); -// } -// }); -// }); - -// describe("iterateStreams()", () => { -// it(`Should iterate through single ${name} stream`, async () => { -// if (name === "Readable") { -// const stream1 = new Readable({ -// read() { -// this.push(Buffer.from("stream 1")); -// this.push(null); -// }, -// }); -// const iterator = iterateStreams([stream1] as unknown as StreamingBlobPayloadOutputTypes[]); - -// const chunks: string[] = []; -// for await (const chunk of iterator) { -// chunks.push(chunk.toString()); -// } -// expect(chunks).toEqual(["stream 1"]); -// } -// }); -// it(`Should iterate through multiple ${name} streams in order`, async () => { -// if (name === "Readable") { -// const stream1 = new Readable({ -// read() { -// this.push(Buffer.from("stream 1")); -// this.push(null); -// }, -// }); -// const stream2 = new Readable({ -// read() { -// this.push(Buffer.from("stream 2")); -// this.push(null); -// }, -// }); -// const stream3 = new Readable({ -// read() { -// this.push(Buffer.from("stream 3")); -// this.push(null); -// }, -// }); -// const iterator = iterateStreams([ -// stream1, -// stream2, -// 
stream3, -// ] as unknown as StreamingBlobPayloadOutputTypes[]); - -// const chunks: string[] = []; -// for await (const chunk of iterator) { -// chunks.push(chunk.toString()); -// } -// expect(chunks).toEqual(["stream 1", "stream 2", "stream 3"]); -// } -// }); -// it(`Should call onBytes callback during ${name} iteration`, () => {}); -// it(`Should call onCompletion callback after ${name} iteration completes`, () => {}); -// it(`Should call onFailure callback when ${name} iteration fails`, () => {}); -// it(`Should handle empty ${name} streams during iteration`, () => {}); -// it(`Should track correct byte count across ${name} streams`, () => {}); -// it(`Should maintain correct index during ${name} stream iteration`, () => {}); -// }); -// }); -// }); -// }); +describe("join-streams tests", () => { + const streamTypes = [ + { + name: "Readable", + createStream: () => new Readable({ read() {} }), + supported: true, + streamType: Readable, + }, + { + name: "ReadableStream", + createStream: () => new ReadableStream(), + supported: false, + streamType: ReadableStream, + }, + ]; + + streamTypes.forEach(({ name, createStream, supported, streamType }) => { + describe.skipIf(!supported)(`${name} streams`, () => { + describe("joinStreams()", () => { + it(`Should return single ${name} when only one stream is provided`, async () => { + const stream = createStream(); + const result = await joinStreams([Promise.resolve(stream as unknown as StreamingBlobPayloadOutputTypes)]); + + expect(result).toBeDefined(); + expect(result).not.toBe(stream); + }); + + it(`Should handle empty ${name} streams array`, async () => { + const result = await joinStreams([] as unknown as Promise[]); + expect(result).toBeDefined(); + if (name === "Readable") { + expect(result).toBeInstanceOf(streamType); + } + }); + + it(`Should join multiple ${name} streams into a single stream`, async () => { + const content1 = Buffer.from("Chunk 1"); + const content2 = Buffer.from("Chunk 2"); + const content3 = 
Buffer.from("Chunk 3"); + + if (name === "Readable") { + const stream1 = new Readable({ + read() { + this.push(content1); + this.push(null); + }, + }); + const stream2 = new Readable({ + read() { + this.push(content2); + this.push(null); + }, + }); + const stream3 = new Readable({ + read() { + this.push(content3); + this.push(null); + }, + }); + + const joinedStream = await joinStreams([ + Promise.resolve(stream1), + Promise.resolve(stream2), + Promise.resolve(stream3), + ] as unknown as Promise[]); + + const chunks: Buffer[] = []; + for await (const chunk of joinedStream as any) { + chunks.push(Buffer.from(chunk)); + } + + const joinedContent = Buffer.concat(chunks).toString(); + expect(joinedContent).toContain(content1.toString()); + expect(joinedContent).toContain(content2.toString()); + expect(joinedContent).toContain(content3.toString()); + } + }); + + it(`Should handle ${name} streams with different chunk sizes`, async () => { + const content1 = Buffer.from("Chunk 1 Chunk 1 Chunk 1"); + const content2 = Buffer.from("Chunk 2"); + const content3 = Buffer.from("Chunk 3 Chunk 3"); + + if (name === "Readable") { + const stream1 = new Readable({ + read() { + this.push(content1); + this.push(null); + }, + }); + const stream2 = new Readable({ + read() { + this.push(content2); + this.push(null); + }, + }); + const stream3 = new Readable({ + read() { + this.push(content3); + this.push(null); + }, + }); + + const joinedStream = await joinStreams([ + Promise.resolve(stream1), + Promise.resolve(stream2), + Promise.resolve(stream3), + ] as unknown as Promise[]); + + const chunks: Buffer[] = []; + for await (const chunk of joinedStream as any) { + chunks.push(Buffer.from(chunk)); + } + + const joinedContent = Buffer.concat(chunks).toString(); + expect(joinedContent).toContain(content1.toString()); + expect(joinedContent).toContain(content2.toString()); + expect(joinedContent).toContain(content3.toString()); + } + }); + + it(`Should handle ${name} streams with no data`, 
async () => { + if (name === "Readable") { + const emptyStream1 = new Readable({ + read() { + this.push(null); + }, + }); + const emptyStream2 = new Readable({ + read() { + this.push(null); + }, + }); + + const joinedStream = await joinStreams([ + Promise.resolve(emptyStream1), + Promise.resolve(emptyStream2), + ] as unknown as Promise[]); + + const chunks: Buffer[] = []; + for await (const chunk of joinedStream as any) { + chunks.push(Buffer.from(chunk)); + } + expect(chunks.length).toBe(0); + expect(Buffer.concat(chunks).length).toBe(0); + } + }); + + it(`Should report progress via eventListeners`, async () => { + if (name === "Readable") { + const stream1 = new Readable({ + read() { + this.push(Buffer.from("data")); + this.push(null); + }, + }); + const stream2 = new Readable({ + read() { + this.push(Buffer.from("more")); + this.push(null); + }, + }); + + const onBytesSpy = vi.fn(); + const onCompletionSpy = vi.fn(); + + const joinedStream = await joinStreams( + [ + Promise.resolve(stream1), + Promise.resolve(stream2), + ] as unknown as Promise[], + { + onBytes: onBytesSpy, + onCompletion: onCompletionSpy, + } + ); + + for await (const _ of joinedStream as any) { + // consume the data + } + + expect(onBytesSpy).toHaveBeenCalled(); + expect(onCompletionSpy).toHaveBeenCalledWith(expect.any(Number), 1); + } + }); + }); + }); + }); +}); diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts index 81ecba3aca29..23b81389d4df 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts @@ -7,7 +7,8 @@ export async function joinStreams( streams: Promise[], eventListeners?: JoinStreamIterationEvents ): Promise { - if (isReadableStream(streams[0])) { + const firstStream = await streams[0]; + if (isReadableStream(firstStream)) { const newReadableStream = new ReadableStream({ async start(controller) { 
for await (const chunk of iterateStreams(streams, eventListeners)) { @@ -32,16 +33,19 @@ export async function* iterateStreams( const stream = await streamPromise; if (isReadableStream(stream)) { const reader = stream.getReader(); - while (true) { - const { done, value } = await reader.read(); - if (done) { - break; + try { + while (true) { + const { done, value } = await reader.read(); + if (done) { + break; + } + yield value; + bytesTransferred += value.byteLength; + eventListeners?.onBytes?.(bytesTransferred, index); } - yield value; - bytesTransferred += value.byteLength; - eventListeners?.onBytes?.(bytesTransferred, index); + } finally { + reader.releaseLock(); } - reader.releaseLock(); } else { const failure = new Error(`unhandled stream type ${(stream as any)?.constructor?.name}`); eventListeners?.onFailure?.(failure, index); diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts index d99bb94561c5..56c18e292936 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts @@ -9,7 +9,8 @@ export async function joinStreams( streams: Promise[], eventListeners?: JoinStreamIterationEvents ): Promise { - if (isReadableStream(streams[0])) { + const firstStream = await streams[0]; + if (isReadableStream(firstStream)) { const newReadableStream = new ReadableStream({ async start(controller) { for await (const chunk of iterateStreams(streams, eventListeners)) { @@ -34,16 +35,19 @@ export async function* iterateStreams( const stream = await streamPromise; if (isReadableStream(stream)) { const reader = stream.getReader(); - while (true) { - const { done, value } = await reader.read(); - if (done) { - break; + try { + while (true) { + const { done, value } = await reader.read(); + if (done) { + break; + } + yield value; + bytesTransferred += value.byteLength; + eventListeners?.onBytes?.(bytesTransferred, index); } - yield value; - 
bytesTransferred += value.byteLength; - eventListeners?.onBytes?.(bytesTransferred, index); + } finally { + reader.releaseLock(); } - reader.releaseLock(); } else if (stream instanceof Readable) { for await (const chunk of stream) { yield chunk; From 94c1e1bc6e9c52991c0e82071029db643b040dd5 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Tue, 22 Jul 2025 16:24:40 +0000 Subject: [PATCH 05/30] feat: addresses change requests for range and part validation --- .../s3-transfer-manager/S3TransferManager.ts | 40 +++++++++++-------- 1 file changed, 23 insertions(+), 17 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index 17b6c5c0a502..2b127938e48f 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -392,7 +392,6 @@ export class S3TransferManager implements IS3TransferManager { if (initialPart.PartsCount! > 1) { for (let part = 2; part <= initialPart.PartsCount!; part++) { this.checkAborted(transferOptions); - const getObjectRequest = { ...request, PartNumber: part, @@ -401,7 +400,7 @@ export class S3TransferManager implements IS3TransferManager { const getObject = this.s3ClientInstance .send(new GetObjectCommand(getObjectRequest), transferOptions) .then((response) => { - this.validatePartDownload(part, response.ContentRange, partSize ?? 0); + this.validatePartDownload(response.ContentRange, part, partSize ?? 
0); if (response.Body && typeof (response.Body as any).getReader === "function") { const reader = (response.Body as any).getReader(); (response.Body as any).getReader = function () { @@ -415,12 +414,11 @@ export class S3TransferManager implements IS3TransferManager { requests.push(getObjectRequest); partCount++; } - } - - if (partCount !== initialPart.PartsCount) { - throw new Error( - `The number of parts downloaded (${partCount}) does not match the expected number (${initialPart.PartsCount})` - ); + if (partCount !== initialPart.PartsCount) { + throw new Error( + `The number of parts downloaded (${partCount}) does not match the expected number (${initialPart.PartsCount})` + ); + } } } else { this.checkAborted(transferOptions); @@ -630,18 +628,20 @@ export class S3TransferManager implements IS3TransferManager { } } - private validatePartDownload(partNumber: number, contentRange: string | undefined, partSize: number) { - if (!contentRange) return; + private validatePartDownload(contentRange: string | undefined, partNumber: number, partSize: number) { + if (!contentRange) { + throw new Error(`Missing ContentRange for part ${partNumber}.`); + } const match = contentRange.match(/bytes (\d+)-(\d+)\/(\d+)/); if (!match) throw new Error(`Invalid ContentRange format: ${contentRange}`); const start = Number.parseInt(match[1]); const end = Number.parseInt(match[2]); - const total = Number.parseInt(match[3]); + const total = Number.parseInt(match[3]) - 1; const expectedStart = (partNumber - 1) * partSize; - const expectedEnd = Math.min(expectedStart + partSize - 1, total - 1); + const expectedEnd = Math.min(expectedStart + partSize - 1, total); if (start !== expectedStart) { throw new Error(`Expected part ${partNumber} to start at ${expectedStart} but got ${start}`); @@ -653,14 +653,16 @@ export class S3TransferManager implements IS3TransferManager { } private validateRangeDownload(requestRange: string, responseRange: string | undefined) { - if (!responseRange) return; + if 
(!responseRange) { + throw new Error(`Missing ContentRange for range ${requestRange}.`); + } const match = responseRange.match(/bytes (\d+)-(\d+)\/(\d+)/); if (!match) throw new Error(`Invalid ContentRange format: ${responseRange}`); const start = Number.parseInt(match[1]); const end = Number.parseInt(match[2]); - const total = Number.parseInt(match[3]); + const total = Number.parseInt(match[3]) - 1; const rangeMatch = requestRange.match(/bytes=(\d+)-(\d+)/); if (!rangeMatch) throw new Error(`Invalid Range format: ${requestRange}`); @@ -672,10 +674,14 @@ export class S3TransferManager implements IS3TransferManager { throw new Error(`Expected range to start at ${expectedStart} but got ${start}`); } - const isFinalPart = end + 1 === total; + if (end === expectedEnd) { + return; + } - if (end !== expectedEnd && !(isFinalPart && end < expectedEnd)) { - throw new Error(`Expected range to end at ${expectedEnd} but got ${end}`); + if (end === total) { + return; } + + throw new Error(`Expected range to end at ${expectedEnd} but got ${end}`); } } From deb53b53b7cf748851c205bccb1cad3c29f9c48d Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Tue, 22 Jul 2025 18:12:02 +0000 Subject: [PATCH 06/30] feat: validate part and range test cases --- .../S3TransferManager.spec.ts | 73 ++++++++++++++++--- 1 file changed, 64 insertions(+), 9 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts index 0fa0eb9322c7..96ac3a1cbf38 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts @@ -584,7 +584,7 @@ describe("S3TransferManager Unit Tests", () => { for (const { partNumber, range } of ranges) { expect(() => { - tm.validatePartDownload(partNumber, range, partSize); + tm.validatePartDownload(range, partNumber, partSize); }).not.toThrow(); } }); @@ -593,15 +593,15 @@ describe("S3TransferManager 
Unit Tests", () => { const partSize = 5242880; expect(() => { - tm.validatePartDownload(2, "bytes 5242881-10485759/13631488", partSize); + tm.validatePartDownload("bytes 5242881-10485759/13631488", 2, partSize); }).toThrow("Expected part 2 to start at 5242880 but got 5242881"); expect(() => { - tm.validatePartDownload(2, "bytes 5242879-10485759/13631488", partSize); + tm.validatePartDownload("bytes 5242879-10485759/13631488", 2, partSize); }).toThrow("Expected part 2 to start at 5242880 but got 5242879"); expect(() => { - tm.validatePartDownload(2, "bytes 0-5242879/13631488", partSize); + tm.validatePartDownload("bytes 0-5242879/13631488", 2, partSize); }).toThrow("Expected part 2 to start at 5242880 but got 0"); }); @@ -609,11 +609,11 @@ describe("S3TransferManager Unit Tests", () => { const partSize = 5242880; expect(() => { - tm.validatePartDownload(2, "bytes 5242880-10485760/13631488", partSize); + tm.validatePartDownload("bytes 5242880-10485760/13631488", 2, partSize); }).toThrow("Expected part 2 to end at 10485759 but got 10485760"); expect(() => { - tm.validatePartDownload(3, "bytes 10485760-13631480/13631488", partSize); + tm.validatePartDownload("bytes 10485760-13631480/13631488", 3, partSize); }).toThrow("Expected part 3 to end at 13631487 but got 13631480"); }); @@ -621,7 +621,7 @@ describe("S3TransferManager Unit Tests", () => { const partSize = 5242880; expect(() => { - tm.validatePartDownload(3, "bytes 10485760-13631487/13631488", partSize); + tm.validatePartDownload("bytes 10485760-13631487/13631488", 3, partSize); }).not.toThrow(); }); @@ -629,12 +629,67 @@ describe("S3TransferManager Unit Tests", () => { const partSize = 5242880; expect(() => { - tm.validatePartDownload(2, "invalid-format", partSize); + tm.validatePartDownload("invalid-format", 2, partSize); }).toThrow("Invalid ContentRange format: invalid-format"); }); + + it("Should throw error for missing ContentRange", () => { + const partSize = 5242880; + + expect(() => { + 
tm.validatePartDownload(undefined, 2, partSize); + }).toThrow("Missing ContentRange for part 2."); + }); }); - // TODO: tests cases for validateRangeDownload() + describe("validateRangeDownload()", () => { + let tm: any; + beforeAll(async () => { + tm = new S3TransferManager() as any; + }, 120_000); + + it("Should pass when response range matches request range", () => { + expect(() => { + tm.validateRangeDownload("bytes=0-5242879", "bytes 0-5242879/13631488"); + }).not.toThrow(); + }); + + it("Should pass when response range ends at total size", () => { + expect(() => { + tm.validateRangeDownload("bytes=10485760-13631500", "bytes 10485760-13631487/13631488"); + }).not.toThrow(); + }); + + it("Should throw error for missing response range", () => { + expect(() => { + tm.validateRangeDownload("bytes=0-5242879", undefined); + }).toThrow("Missing ContentRange for range bytes=0-5242879."); + }); + + it("Should throw error for invalid response range format", () => { + expect(() => { + tm.validateRangeDownload("bytes=0-5242879", "invalid-format"); + }).toThrow("Invalid ContentRange format: invalid-format"); + }); + + it("Should throw error for invalid request range format", () => { + expect(() => { + tm.validateRangeDownload("invalid-format", "bytes 0-5242879/13631488"); + }).toThrow("Invalid Range format: invalid-format"); + }); + + it("Should throw error for incorrect start position", () => { + expect(() => { + tm.validateRangeDownload("bytes=0-5242879", "bytes 1-5242879/13631488"); + }).toThrow("Expected range to start at 0 but got 1"); + }); + + it("Should throw error for incorrect end position", () => { + expect(() => { + tm.validateRangeDownload("bytes=0-5242879", "bytes 0-5242878/13631488"); + }).toThrow("Expected range to end at 5242879 but got 5242878"); + }); + }); }); describe("join-streams tests", () => { From 16264c05d9ac1d5dfd76fda7ef8f9433900ed378 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Tue, 22 Jul 2025 22:01:29 +0000 Subject: [PATCH 07/30] feat: 
creating tests for join-streams, made browser specific --- lib/lib-storage/package.json | 3 + .../S3TransferManager.browser.spec.ts | 126 ++++++++ .../S3TransferManager.spec.ts | 275 +++++++---------- .../s3-transfer-manager/S3TransferManager.ts | 6 + .../join-streams.browser.ts | 3 +- lib/lib-storage/vitest.config.browser.ts | 8 + yarn.lock | 290 +++++++++++++++++- 7 files changed, 549 insertions(+), 162 deletions(-) create mode 100644 lib/lib-storage/src/s3-transfer-manager/S3TransferManager.browser.spec.ts create mode 100644 lib/lib-storage/vitest.config.browser.ts diff --git a/lib/lib-storage/package.json b/lib/lib-storage/package.json index b55eba950877..e86bbbcf5802 100644 --- a/lib/lib-storage/package.json +++ b/lib/lib-storage/package.json @@ -17,6 +17,8 @@ "test": "yarn g:vitest run", "test:e2e": "yarn g:vitest run -c vitest.config.e2e.ts --mode development", "test:watch": "yarn g:vitest watch", + "test:browser": "yarn g:vitest run -c vitest.config.browser.ts", + "test:browser:watch": "yarn g:vitest watch -c vitest.config.browser.ts", "test:e2e:watch": "yarn g:vitest watch -c vitest.config.e2e.ts" }, "engines": { @@ -46,6 +48,7 @@ "@types/node": "^18.19.69", "concurrently": "7.0.0", "downlevel-dts": "0.10.1", + "jsdom": "^26.1.0", "rimraf": "3.0.2", "typescript": "~5.8.3", "web-streams-polyfill": "3.2.1" diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.browser.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.browser.spec.ts new file mode 100644 index 000000000000..b8c9a5ff4ff8 --- /dev/null +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.browser.spec.ts @@ -0,0 +1,126 @@ +// s3-transfer-manager.browser.spec.ts +import { StreamingBlobPayloadOutputTypes } from "@smithy/types"; +import { sdkStreamMixin } from "@smithy/util-stream"; +import { describe, expect, it, vi } from "vitest"; + +import { joinStreams } from "./join-streams"; + +describe("join-streams tests", () => { + const 
createReadableStreamWithContent = (content: Uint8Array) => + new ReadableStream({ + start(controller) { + controller.enqueue(content); + controller.close(); + }, + }); + + const createEmptyReadableStream = () => + new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + const consumeReadableStream = async (stream: any): Promise => { + const reader = stream.getReader(); + const chunks: Uint8Array[] = []; + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + if (value) chunks.push(value); + } + + return chunks; + }; + + const testCases = [ + { + name: "ReadableStream", + createStream: () => new ReadableStream(), + createWithContent: createReadableStreamWithContent, + createEmpty: createEmptyReadableStream, + consume: consumeReadableStream, + isInstance: (stream: any) => typeof stream.getReader === "function", + }, + ]; + + testCases.forEach(({ name, createStream, createWithContent, createEmpty, consume, isInstance }) => { + describe(`joinStreams() with ${name}`, () => { + it("should return single stream when only one stream is provided", async () => { + const stream = createStream(); + const mixedStream = sdkStreamMixin(stream); + const result = await joinStreams([Promise.resolve(mixedStream as StreamingBlobPayloadOutputTypes)]); + + expect(result).toBeDefined(); + expect(result).not.toBe(stream); + expect(isInstance(result)).toBe(true); + }); + + it("should join multiple streams into a single stream", async () => { + const contents = [ + new Uint8Array([67, 104, 117, 110, 107, 32, 49]), // "Chunk 1" + new Uint8Array([67, 104, 117, 110, 107, 32, 50]), // "Chunk 2" + new Uint8Array([67, 104, 117, 110, 107, 32, 51]), // "Chunk 3" + ]; + + const streams = contents.map((content) => + Promise.resolve(sdkStreamMixin(createWithContent(content)) as StreamingBlobPayloadOutputTypes) + ); + + const joinedStream = await joinStreams(streams); + + const chunks = await consume(joinedStream); + + 
expect(chunks.length).toBe(contents.length); + chunks.forEach((chunk, i) => { + expect(chunk).toEqual(contents[i]); + }); + }); + + it("should handle streams with no data", async () => { + const streams = [ + Promise.resolve(sdkStreamMixin(createEmpty()) as StreamingBlobPayloadOutputTypes), + Promise.resolve(sdkStreamMixin(createEmpty()) as StreamingBlobPayloadOutputTypes), + ]; + + const joinedStream = await joinStreams(streams); + + const chunks = await consume(joinedStream); + + expect(chunks.length).toBe(0); + }); + + it("should report progress via eventListeners", async () => { + const streams = [ + Promise.resolve( + sdkStreamMixin(createWithContent(new Uint8Array([100, 97, 116, 97]))) as StreamingBlobPayloadOutputTypes + ), // "data" + Promise.resolve( + sdkStreamMixin(createWithContent(new Uint8Array([109, 111, 114, 101]))) as StreamingBlobPayloadOutputTypes + ), // "more" + ]; + + const onBytesSpy = vi.fn(); + const onCompletionSpy = vi.fn(); + + const joinedStream = await joinStreams(streams, { + onBytes: onBytesSpy, + onCompletion: onCompletionSpy, + }); + + await consume(joinedStream); + + expect(onBytesSpy).toHaveBeenCalled(); + expect(onCompletionSpy).toHaveBeenCalledWith(expect.any(Number), 1); + }); + + it("should throw error for unsupported stream types", async () => { + const blob = new Blob(["test"]); + await expect( + joinStreams([Promise.resolve(blob as unknown as StreamingBlobPayloadOutputTypes)]) + ).rejects.toThrow("Unsupported Stream Type"); + }); + }); + }); +}); diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts index 96ac3a1cbf38..5ae9702023f8 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts @@ -697,185 +697,146 @@ describe("join-streams tests", () => { { name: "Readable", createStream: () => new Readable({ read() {} }), - supported: true, 
streamType: Readable, }, { name: "ReadableStream", createStream: () => new ReadableStream(), - supported: false, streamType: ReadableStream, }, ]; - streamTypes.forEach(({ name, createStream, supported, streamType }) => { - describe.skipIf(!supported)(`${name} streams`, () => { - describe("joinStreams()", () => { - it(`Should return single ${name} when only one stream is provided`, async () => { + describe("join-streams tests", () => { + const createReadableWithContent = (content: Buffer) => + new Readable({ + read() { + this.push(content); + this.push(null); + }, + }); + + const createEmptyReadable = () => + new Readable({ + read() { + this.push(null); + }, + }); + + const createReadableStreamWithContent = (content: Uint8Array) => + new ReadableStream({ + start(controller) { + controller.enqueue(content); + controller.close(); + }, + }); + + const createEmptyReadableStream = () => + new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + const consumeReadable = async (stream: any): Promise => { + const chunks: Buffer[] = []; + for await (const chunk of stream) { + chunks.push(Buffer.from(chunk)); + } + return Buffer.concat(chunks); + }; + + const consumeReadableStream = async (stream: any): Promise => { + const reader = stream.getReader(); + const chunks: Uint8Array[] = []; + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + if (value) chunks.push(value); + } + + return chunks; + }; + + const testCases = [ + { + name: "Readable", + createStream: () => new Readable({ read() {} }), + createWithContent: createReadableWithContent, + createEmpty: createEmptyReadable, + consume: consumeReadable, + isInstance: (stream: any) => stream instanceof Readable, + }, + { + name: "ReadableStream", + createStream: () => new ReadableStream(), + createWithContent: createReadableStreamWithContent, + createEmpty: createEmptyReadableStream, + consume: consumeReadableStream, + isInstance: (stream: any) => typeof stream.getReader 
=== "function", + }, + ]; + + testCases.forEach(({ name, createStream, createWithContent, createEmpty, consume, isInstance }) => { + describe(`joinStreams() with ${name}`, () => { + it("should return single stream when only one stream is provided", async () => { const stream = createStream(); const result = await joinStreams([Promise.resolve(stream as unknown as StreamingBlobPayloadOutputTypes)]); expect(result).toBeDefined(); expect(result).not.toBe(stream); + expect(isInstance(result)).toBe(true); }); - it(`Should handle empty ${name} streams array`, async () => { - const result = await joinStreams([] as unknown as Promise[]); - expect(result).toBeDefined(); - if (name === "Readable") { - expect(result).toBeInstanceOf(streamType); - } - }); + it("should join multiple streams into a single stream", async () => { + const contents = [Buffer.from("Chunk 1"), Buffer.from("Chunk 2"), Buffer.from("Chunk 3")]; - it(`Should join multiple ${name} streams into a single stream`, async () => { - const content1 = Buffer.from("Chunk 1"); - const content2 = Buffer.from("Chunk 2"); - const content3 = Buffer.from("Chunk 3"); - - if (name === "Readable") { - const stream1 = new Readable({ - read() { - this.push(content1); - this.push(null); - }, - }); - const stream2 = new Readable({ - read() { - this.push(content2); - this.push(null); - }, - }); - const stream3 = new Readable({ - read() { - this.push(content3); - this.push(null); - }, - }); - - const joinedStream = await joinStreams([ - Promise.resolve(stream1), - Promise.resolve(stream2), - Promise.resolve(stream3), - ] as unknown as Promise[]); - - const chunks: Buffer[] = []; - for await (const chunk of joinedStream as any) { - chunks.push(Buffer.from(chunk)); - } - - const joinedContent = Buffer.concat(chunks).toString(); - expect(joinedContent).toContain(content1.toString()); - expect(joinedContent).toContain(content2.toString()); - expect(joinedContent).toContain(content3.toString()); - } - }); + const streams = 
contents.map((content) => + Promise.resolve(createWithContent(content) as unknown as StreamingBlobPayloadOutputTypes) + ); - it(`Should handle ${name} streams with different chunk sizes`, async () => { - const content1 = Buffer.from("Chunk 1 Chunk 1 Chunk 1"); - const content2 = Buffer.from("Chunk 2"); - const content3 = Buffer.from("Chunk 3 Chunk 3"); - - if (name === "Readable") { - const stream1 = new Readable({ - read() { - this.push(content1); - this.push(null); - }, - }); - const stream2 = new Readable({ - read() { - this.push(content2); - this.push(null); - }, - }); - const stream3 = new Readable({ - read() { - this.push(content3); - this.push(null); - }, - }); - - const joinedStream = await joinStreams([ - Promise.resolve(stream1), - Promise.resolve(stream2), - Promise.resolve(stream3), - ] as unknown as Promise[]); - - const chunks: Buffer[] = []; - for await (const chunk of joinedStream as any) { - chunks.push(Buffer.from(chunk)); - } - - const joinedContent = Buffer.concat(chunks).toString(); - expect(joinedContent).toContain(content1.toString()); - expect(joinedContent).toContain(content2.toString()); - expect(joinedContent).toContain(content3.toString()); - } + const joinedStream = await joinStreams(streams); + + const chunks = await consume(joinedStream); + + const joinedContent = Buffer.isBuffer(chunks) ? 
chunks.toString() : Buffer.concat(chunks).toString(); + contents.forEach((content) => { + expect(joinedContent).toContain(content.toString()); + }); }); - it(`Should handle ${name} streams with no data`, async () => { - if (name === "Readable") { - const emptyStream1 = new Readable({ - read() { - this.push(null); - }, - }); - const emptyStream2 = new Readable({ - read() { - this.push(null); - }, - }); - - const joinedStream = await joinStreams([ - Promise.resolve(emptyStream1), - Promise.resolve(emptyStream2), - ] as unknown as Promise[]); - - const chunks: Buffer[] = []; - for await (const chunk of joinedStream as any) { - chunks.push(Buffer.from(chunk)); - } - expect(chunks.length).toBe(0); - expect(Buffer.concat(chunks).length).toBe(0); - } + it("should handle streams with no data", async () => { + const streams = [ + Promise.resolve(createEmpty() as unknown as StreamingBlobPayloadOutputTypes), + Promise.resolve(createEmpty() as unknown as StreamingBlobPayloadOutputTypes), + ]; + + const joinedStream = await joinStreams(streams); + + const chunks = await consume(joinedStream); + + const length = Array.isArray(chunks) ? 
chunks.length : chunks.length; + expect(length).toBe(0); }); - it(`Should report progress via eventListeners`, async () => { - if (name === "Readable") { - const stream1 = new Readable({ - read() { - this.push(Buffer.from("data")); - this.push(null); - }, - }); - const stream2 = new Readable({ - read() { - this.push(Buffer.from("more")); - this.push(null); - }, - }); - - const onBytesSpy = vi.fn(); - const onCompletionSpy = vi.fn(); - - const joinedStream = await joinStreams( - [ - Promise.resolve(stream1), - Promise.resolve(stream2), - ] as unknown as Promise[], - { - onBytes: onBytesSpy, - onCompletion: onCompletionSpy, - } - ); - - for await (const _ of joinedStream as any) { - // consume the data - } - - expect(onBytesSpy).toHaveBeenCalled(); - expect(onCompletionSpy).toHaveBeenCalledWith(expect.any(Number), 1); - } + it("should report progress via eventListeners", async () => { + const streams = [ + Promise.resolve(createWithContent(Buffer.from("data")) as unknown as StreamingBlobPayloadOutputTypes), + Promise.resolve(createWithContent(Buffer.from("more")) as unknown as StreamingBlobPayloadOutputTypes), + ]; + + const onBytesSpy = vi.fn(); + const onCompletionSpy = vi.fn(); + + const joinedStream = await joinStreams(streams, { + onBytes: onBytesSpy, + onCompletion: onCompletionSpy, + }); + + await consume(joinedStream); + + expect(onBytesSpy).toHaveBeenCalled(); + expect(onCompletionSpy).toHaveBeenCalledWith(expect.any(Number), 1); }); }); }); diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index 2b127938e48f..6cf2f4924bd3 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -23,6 +23,12 @@ import type { UploadResponse, } from "./types"; +/** + * Describe what this is + * TODO: Switch all @public to @alpha + * TODO: tag internal for itneral functions + * @alpha + */ export class 
S3TransferManager implements IS3TransferManager { private static MIN_PART_SIZE = 5 * 1024 * 1024; // 5MB private static DEFAULT_PART_SIZE = 8 * 1024 * 1024; // 8MB diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts index 23b81389d4df..3543d0dbb0a6 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts @@ -1,5 +1,5 @@ import { StreamingBlobPayloadOutputTypes } from "@smithy/types"; -import { isBlob, isReadableStream, sdkStreamMixin } from "@smithy/util-stream"; +import { isReadableStream, sdkStreamMixin } from "@smithy/util-stream"; import { JoinStreamIterationEvents } from "./types"; @@ -8,6 +8,7 @@ export async function joinStreams( eventListeners?: JoinStreamIterationEvents ): Promise { const firstStream = await streams[0]; + console.log("TESTING BROWSER VERSION"); if (isReadableStream(firstStream)) { const newReadableStream = new ReadableStream({ async start(controller) { diff --git a/lib/lib-storage/vitest.config.browser.ts b/lib/lib-storage/vitest.config.browser.ts new file mode 100644 index 000000000000..56f99838b736 --- /dev/null +++ b/lib/lib-storage/vitest.config.browser.ts @@ -0,0 +1,8 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + include: ["**/*.browser.spec.ts"], + environment: "jsdom", + }, +}); diff --git a/yarn.lock b/yarn.lock index fef234f88dfc..a7f38c463fad 100644 --- a/yarn.lock +++ b/yarn.lock @@ -15,6 +15,19 @@ __metadata: languageName: node linkType: hard +"@asamuzakjp/css-color@npm:^3.2.0": + version: 3.2.0 + resolution: "@asamuzakjp/css-color@npm:3.2.0" + dependencies: + "@csstools/css-calc": "npm:^2.1.3" + "@csstools/css-color-parser": "npm:^3.0.9" + "@csstools/css-parser-algorithms": "npm:^3.0.4" + "@csstools/css-tokenizer": "npm:^3.0.3" + lru-cache: "npm:^10.4.3" + checksum: 
10c0/a4bf1c831751b1fae46b437e37e8a38c0b5bd58d23230157ae210bd1e905fe509b89b7c243e63d1522d852668a6292ed730a160e21342772b4e5b7b8ea14c092 + languageName: node + linkType: hard + "@aws-crypto/crc32@npm:5.2.0": version: 5.2.0 resolution: "@aws-crypto/crc32@npm:5.2.0" @@ -23752,6 +23765,7 @@ __metadata: concurrently: "npm:7.0.0" downlevel-dts: "npm:0.10.1" events: "npm:3.3.0" + jsdom: "npm:^26.1.0" rimraf: "npm:3.0.2" stream-browserify: "npm:3.0.0" tslib: "npm:^2.6.2" @@ -25576,6 +25590,52 @@ __metadata: languageName: node linkType: hard +"@csstools/color-helpers@npm:^5.0.2": + version: 5.0.2 + resolution: "@csstools/color-helpers@npm:5.0.2" + checksum: 10c0/bebaddb28b9eb58b0449edd5d0c0318fa88f3cb079602ee27e88c9118070d666dcc4e09a5aa936aba2fde6ba419922ade07b7b506af97dd7051abd08dfb2959b + languageName: node + linkType: hard + +"@csstools/css-calc@npm:^2.1.3, @csstools/css-calc@npm:^2.1.4": + version: 2.1.4 + resolution: "@csstools/css-calc@npm:2.1.4" + peerDependencies: + "@csstools/css-parser-algorithms": ^3.0.5 + "@csstools/css-tokenizer": ^3.0.4 + checksum: 10c0/42ce5793e55ec4d772083808a11e9fb2dfe36db3ec168713069a276b4c3882205b3507c4680224c28a5d35fe0bc2d308c77f8f2c39c7c09aad8747708eb8ddd8 + languageName: node + linkType: hard + +"@csstools/css-color-parser@npm:^3.0.9": + version: 3.0.10 + resolution: "@csstools/css-color-parser@npm:3.0.10" + dependencies: + "@csstools/color-helpers": "npm:^5.0.2" + "@csstools/css-calc": "npm:^2.1.4" + peerDependencies: + "@csstools/css-parser-algorithms": ^3.0.5 + "@csstools/css-tokenizer": ^3.0.4 + checksum: 10c0/8f8a2395b117c2f09366b5c9bf49bc740c92a65b6330fe3cc1e76abafd0d1000e42a657d7b0a3814846a66f1d69896142f7e36d7a4aca77de977e5cc5f944747 + languageName: node + linkType: hard + +"@csstools/css-parser-algorithms@npm:^3.0.4": + version: 3.0.5 + resolution: "@csstools/css-parser-algorithms@npm:3.0.5" + peerDependencies: + "@csstools/css-tokenizer": ^3.0.4 + checksum: 
10c0/d9a1c888bd43849ae3437ca39251d5c95d2c8fd6b5ccdb7c45491dfd2c1cbdc3075645e80901d120e4d2c1993db9a5b2d83793b779dbbabcfb132adb142eb7f7 + languageName: node + linkType: hard + +"@csstools/css-tokenizer@npm:^3.0.3": + version: 3.0.4 + resolution: "@csstools/css-tokenizer@npm:3.0.4" + checksum: 10c0/3b589f8e9942075a642213b389bab75a2d50d05d203727fcdac6827648a5572674caff07907eff3f9a2389d86a4ee47308fafe4f8588f4a77b7167c588d2559f + languageName: node + linkType: hard + "@cucumber/ci-environment@npm:9.1.0": version: 9.1.0 resolution: "@cucumber/ci-environment@npm:9.1.0" @@ -32440,6 +32500,16 @@ __metadata: languageName: node linkType: hard +"cssstyle@npm:^4.2.1": + version: 4.6.0 + resolution: "cssstyle@npm:4.6.0" + dependencies: + "@asamuzakjp/css-color": "npm:^3.2.0" + rrweb-cssom: "npm:^0.8.0" + checksum: 10c0/71add1b0ffafa1bedbef6855db6189b9523d3320e015a0bf3fbd504760efb9a81e1f1a225228d5fa892ee58e56d06994ca372e7f4e461cda7c4c9985fe075f65 + languageName: node + linkType: hard + "d@npm:1, d@npm:^1.0.1, d@npm:^1.0.2": version: 1.0.2 resolution: "d@npm:1.0.2" @@ -32466,6 +32536,16 @@ __metadata: languageName: node linkType: hard +"data-urls@npm:^5.0.0": + version: 5.0.0 + resolution: "data-urls@npm:5.0.0" + dependencies: + whatwg-mimetype: "npm:^4.0.0" + whatwg-url: "npm:^14.0.0" + checksum: 10c0/1b894d7d41c861f3a4ed2ae9b1c3f0909d4575ada02e36d3d3bc584bdd84278e20709070c79c3b3bff7ac98598cb191eb3e86a89a79ea4ee1ef360e1694f92ad + languageName: node + linkType: hard + "date-fns@npm:^2.16.1": version: 2.30.0 resolution: "date-fns@npm:2.30.0" @@ -32546,6 +32626,13 @@ __metadata: languageName: node linkType: hard +"decimal.js@npm:^10.5.0": + version: 10.6.0 + resolution: "decimal.js@npm:10.6.0" + checksum: 10c0/07d69fbcc54167a340d2d97de95f546f9ff1f69d2b45a02fd7a5292412df3cd9eb7e23065e532a318f5474a2e1bccf8392fdf0443ef467f97f3bf8cb0477e5aa + languageName: node + linkType: hard + "decomment@npm:0.9.5": version: 0.9.5 resolution: "decomment@npm:0.9.5" @@ -32965,6 +33052,13 @@ __metadata: 
languageName: node linkType: hard +"entities@npm:^6.0.0": + version: 6.0.1 + resolution: "entities@npm:6.0.1" + checksum: 10c0/ed836ddac5acb34341094eb495185d527bd70e8632b6c0d59548cbfa23defdbae70b96f9a405c82904efa421230b5b3fd2283752447d737beffd3f3e6ee74414 + languageName: node + linkType: hard + "env-paths@npm:^2.2.0": version: 2.2.1 resolution: "env-paths@npm:2.2.1" @@ -34968,6 +35062,15 @@ __metadata: languageName: node linkType: hard +"html-encoding-sniffer@npm:^4.0.0": + version: 4.0.0 + resolution: "html-encoding-sniffer@npm:4.0.0" + dependencies: + whatwg-encoding: "npm:^3.1.1" + checksum: 10c0/523398055dc61ac9b34718a719cb4aa691e4166f29187e211e1607de63dc25ac7af52ca7c9aead0c4b3c0415ffecb17326396e1202e2e86ff4bca4c0ee4c6140 + languageName: node + linkType: hard + "html-escaper@npm:^2.0.0": version: 2.0.2 resolution: "html-escaper@npm:2.0.2" @@ -35006,7 +35109,7 @@ __metadata: languageName: node linkType: hard -"http-proxy-agent@npm:^7.0.0": +"http-proxy-agent@npm:^7.0.0, http-proxy-agent@npm:^7.0.2": version: 7.0.2 resolution: "http-proxy-agent@npm:7.0.2" dependencies: @@ -35044,7 +35147,7 @@ __metadata: languageName: node linkType: hard -"https-proxy-agent@npm:^7.0.1": +"https-proxy-agent@npm:^7.0.1, https-proxy-agent@npm:^7.0.6": version: 7.0.6 resolution: "https-proxy-agent@npm:7.0.6" dependencies: @@ -35107,7 +35210,7 @@ __metadata: languageName: node linkType: hard -"iconv-lite@npm:^0.6.2": +"iconv-lite@npm:0.6.3, iconv-lite@npm:^0.6.2": version: 0.6.3 resolution: "iconv-lite@npm:0.6.3" dependencies: @@ -35454,6 +35557,13 @@ __metadata: languageName: node linkType: hard +"is-potential-custom-element-name@npm:^1.0.1": + version: 1.0.1 + resolution: "is-potential-custom-element-name@npm:1.0.1" + checksum: 10c0/b73e2f22bc863b0939941d369486d308b43d7aef1f9439705e3582bfccaa4516406865e32c968a35f97a99396dac84e2624e67b0a16b0a15086a785e16ce7db9 + languageName: node + linkType: hard + "is-promise@npm:^2.1.0": version: 2.2.2 resolution: "is-promise@npm:2.2.2" @@ 
-36635,6 +36745,39 @@ __metadata: languageName: node linkType: hard +"jsdom@npm:^26.1.0": + version: 26.1.0 + resolution: "jsdom@npm:26.1.0" + dependencies: + cssstyle: "npm:^4.2.1" + data-urls: "npm:^5.0.0" + decimal.js: "npm:^10.5.0" + html-encoding-sniffer: "npm:^4.0.0" + http-proxy-agent: "npm:^7.0.2" + https-proxy-agent: "npm:^7.0.6" + is-potential-custom-element-name: "npm:^1.0.1" + nwsapi: "npm:^2.2.16" + parse5: "npm:^7.2.1" + rrweb-cssom: "npm:^0.8.0" + saxes: "npm:^6.0.0" + symbol-tree: "npm:^3.2.4" + tough-cookie: "npm:^5.1.1" + w3c-xmlserializer: "npm:^5.0.0" + webidl-conversions: "npm:^7.0.0" + whatwg-encoding: "npm:^3.1.1" + whatwg-mimetype: "npm:^4.0.0" + whatwg-url: "npm:^14.1.1" + ws: "npm:^8.18.0" + xml-name-validator: "npm:^5.0.0" + peerDependencies: + canvas: ^3.0.0 + peerDependenciesMeta: + canvas: + optional: true + checksum: 10c0/5b14a5bc32ce077a06fb42d1ab95b1191afa5cbbce8859e3b96831c5143becbbcbf0511d4d4934e922d2901443ced2cdc3b734c1cf30b5f73b3e067ce457d0f4 + languageName: node + linkType: hard + "jsesc@npm:^3.0.2": version: 3.1.0 resolution: "jsesc@npm:3.1.0" @@ -37254,7 +37397,7 @@ __metadata: languageName: node linkType: hard -"lru-cache@npm:^10.0.1, lru-cache@npm:^10.2.0": +"lru-cache@npm:^10.0.1, lru-cache@npm:^10.2.0, lru-cache@npm:^10.4.3": version: 10.4.3 resolution: "lru-cache@npm:10.4.3" checksum: 10c0/ebd04fbca961e6c1d6c0af3799adcc966a1babe798f685bb84e6599266599cd95d94630b10262f5424539bc4640107e8a33aa28585374abf561d30d16f4b39fb @@ -38299,6 +38442,13 @@ __metadata: languageName: node linkType: hard +"nwsapi@npm:^2.2.16": + version: 2.2.20 + resolution: "nwsapi@npm:2.2.20" + checksum: 10c0/07f4dafa3186aef7c007863e90acd4342a34ba9d44b22f14f644fdb311f6086887e21c2fc15efaa826c2bc39ab2bc841364a1a630e7c87e0cb723ba59d729297 + languageName: node + linkType: hard + "nx@npm:15.9.7, nx@npm:>=14.6.1 < 16": version: 15.9.7 resolution: "nx@npm:15.9.7" @@ -38788,6 +38938,15 @@ __metadata: languageName: node linkType: hard +"parse5@npm:^7.2.1": + 
version: 7.3.0 + resolution: "parse5@npm:7.3.0" + dependencies: + entities: "npm:^6.0.0" + checksum: 10c0/7fd2e4e247e85241d6f2a464d0085eed599a26d7b0a5233790c49f53473232eb85350e8133344d9b3fd58b89339e7ad7270fe1f89d28abe50674ec97b87f80b5 + languageName: node + linkType: hard + "parseurl@npm:~1.3.3": version: 1.3.3 resolution: "parseurl@npm:1.3.3" @@ -39959,6 +40118,13 @@ __metadata: languageName: node linkType: hard +"rrweb-cssom@npm:^0.8.0": + version: 0.8.0 + resolution: "rrweb-cssom@npm:0.8.0" + checksum: 10c0/56f2bfd56733adb92c0b56e274c43f864b8dd48784d6fe946ef5ff8d438234015e59ad837fc2ad54714b6421384141c1add4eb569e72054e350d1f8a50b8ac7b + languageName: node + linkType: hard + "rsvp@npm:^4.8.4": version: 4.8.5 resolution: "rsvp@npm:4.8.5" @@ -40056,6 +40222,15 @@ __metadata: languageName: node linkType: hard +"saxes@npm:^6.0.0": + version: 6.0.0 + resolution: "saxes@npm:6.0.0" + dependencies: + xmlchars: "npm:^2.2.0" + checksum: 10c0/3847b839f060ef3476eb8623d099aa502ad658f5c40fd60c105ebce86d244389b0d76fcae30f4d0c728d7705ceb2f7e9b34bb54717b6a7dbedaf5dad2d9a4b74 + languageName: node + linkType: hard + "schema-utils@npm:^3.1.0": version: 3.3.0 resolution: "schema-utils@npm:3.3.0" @@ -40885,6 +41060,13 @@ __metadata: languageName: node linkType: hard +"symbol-tree@npm:^3.2.4": + version: 3.2.4 + resolution: "symbol-tree@npm:3.2.4" + checksum: 10c0/dfbe201ae09ac6053d163578778c53aa860a784147ecf95705de0cd23f42c851e1be7889241495e95c37cabb058edb1052f141387bef68f705afc8f9dd358509 + languageName: node + linkType: hard + "tapable@npm:^2.1.1, tapable@npm:^2.2.0": version: 2.2.1 resolution: "tapable@npm:2.2.1" @@ -41132,6 +41314,24 @@ __metadata: languageName: node linkType: hard +"tldts-core@npm:^6.1.86": + version: 6.1.86 + resolution: "tldts-core@npm:6.1.86" + checksum: 10c0/8133c29375f3f99f88fce5f4d62f6ecb9532b106f31e5423b27c1eb1b6e711bd41875184a456819ceaed5c8b94f43911b1ad57e25c6eb86e1fc201228ff7e2af + languageName: node + linkType: hard + +"tldts@npm:^6.1.32": + version: 
6.1.86 + resolution: "tldts@npm:6.1.86" + dependencies: + tldts-core: "npm:^6.1.86" + bin: + tldts: bin/cli.js + checksum: 10c0/27ae7526d9d78cb97b2de3f4d102e0b4321d1ccff0648a7bb0e039ed54acbce86bacdcd9cd3c14310e519b457854e7bafbef1f529f58a1e217a737ced63f0940 + languageName: node + linkType: hard + "tmp@npm:^0.0.33": version: 0.0.33 resolution: "tmp@npm:0.0.33" @@ -41195,6 +41395,15 @@ __metadata: languageName: node linkType: hard +"tough-cookie@npm:^5.1.1": + version: 5.1.2 + resolution: "tough-cookie@npm:5.1.2" + dependencies: + tldts: "npm:^6.1.32" + checksum: 10c0/5f95023a47de0f30a902bba951664b359725597d8adeabc66a0b93a931c3af801e1e697dae4b8c21a012056c0ea88bd2bf4dfe66b2adcf8e2f42cd9796fe0626 + languageName: node + linkType: hard + "tough-cookie@npm:~2.5.0": version: 2.5.0 resolution: "tough-cookie@npm:2.5.0" @@ -41205,6 +41414,15 @@ __metadata: languageName: node linkType: hard +"tr46@npm:^5.1.0": + version: 5.1.1 + resolution: "tr46@npm:5.1.1" + dependencies: + punycode: "npm:^2.3.1" + checksum: 10c0/ae270e194d52ec67ebd695c1a42876e0f19b96e4aca2ab464ab1d9d17dc3acd3e18764f5034c93897db73421563be27c70c98359c4501136a497e46deda5d5ec + languageName: node + linkType: hard + "tr46@npm:~0.0.3": version: 0.0.3 resolution: "tr46@npm:0.0.3" @@ -42277,6 +42495,15 @@ __metadata: languageName: node linkType: hard +"w3c-xmlserializer@npm:^5.0.0": + version: 5.0.0 + resolution: "w3c-xmlserializer@npm:5.0.0" + dependencies: + xml-name-validator: "npm:^5.0.0" + checksum: 10c0/8712774c1aeb62dec22928bf1cdfd11426c2c9383a1a63f2bcae18db87ca574165a0fbe96b312b73652149167ac6c7f4cf5409f2eb101d9c805efe0e4bae798b + languageName: node + linkType: hard + "walk-up-path@npm:^1.0.0": version: 1.0.0 resolution: "walk-up-path@npm:1.0.0" @@ -42421,6 +42648,15 @@ __metadata: languageName: node linkType: hard +"whatwg-encoding@npm:^3.1.1": + version: 3.1.1 + resolution: "whatwg-encoding@npm:3.1.1" + dependencies: + iconv-lite: "npm:0.6.3" + checksum: 
10c0/273b5f441c2f7fda3368a496c3009edbaa5e43b71b09728f90425e7f487e5cef9eb2b846a31bd760dd8077739c26faf6b5ca43a5f24033172b003b72cf61a93e + languageName: node + linkType: hard + "whatwg-mimetype@npm:^3.0.0": version: 3.0.0 resolution: "whatwg-mimetype@npm:3.0.0" @@ -42428,6 +42664,23 @@ __metadata: languageName: node linkType: hard +"whatwg-mimetype@npm:^4.0.0": + version: 4.0.0 + resolution: "whatwg-mimetype@npm:4.0.0" + checksum: 10c0/a773cdc8126b514d790bdae7052e8bf242970cebd84af62fb2f35a33411e78e981f6c0ab9ed1fe6ec5071b09d5340ac9178e05b52d35a9c4bcf558ba1b1551df + languageName: node + linkType: hard + +"whatwg-url@npm:^14.0.0, whatwg-url@npm:^14.1.1": + version: 14.2.0 + resolution: "whatwg-url@npm:14.2.0" + dependencies: + tr46: "npm:^5.1.0" + webidl-conversions: "npm:^7.0.0" + checksum: 10c0/f746fc2f4c906607d09537de1227b13f9494c171141e5427ed7d2c0dd0b6a48b43d8e71abaae57d368d0c06b673fd8ec63550b32ad5ed64990c7b0266c2b4272 + languageName: node + linkType: hard + "whatwg-url@npm:^5.0.0": version: 5.0.0 resolution: "whatwg-url@npm:5.0.0" @@ -42651,6 +42904,35 @@ __metadata: languageName: node linkType: hard +"ws@npm:^8.18.0": + version: 8.18.3 + resolution: "ws@npm:8.18.3" + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ">=5.0.2" + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + checksum: 10c0/eac918213de265ef7cb3d4ca348b891a51a520d839aa51cdb8ca93d4fa7ff9f6ccb339ccee89e4075324097f0a55157c89fa3f7147bde9d8d7e90335dc087b53 + languageName: node + linkType: hard + +"xml-name-validator@npm:^5.0.0": + version: 5.0.0 + resolution: "xml-name-validator@npm:5.0.0" + checksum: 10c0/3fcf44e7b73fb18be917fdd4ccffff3639373c7cb83f8fc35df6001fecba7942f1dbead29d91ebb8315e2f2ff786b508f0c9dc0215b6353f9983c6b7d62cb1f5 + languageName: node + linkType: hard + +"xmlchars@npm:^2.2.0": + version: 2.2.0 + resolution: "xmlchars@npm:2.2.0" + checksum: 
10c0/b64b535861a6f310c5d9bfa10834cf49127c71922c297da9d4d1b45eeaae40bf9b4363275876088fbe2667e5db028d2cd4f8ee72eed9bede840a67d57dab7593 + languageName: node + linkType: hard + "xtend@npm:^4.0.0, xtend@npm:^4.0.2, xtend@npm:~4.0.0, xtend@npm:~4.0.1": version: 4.0.2 resolution: "xtend@npm:4.0.2" From e558dea70e697e0ffc73c74320b10bdd499c2c43 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Wed, 23 Jul 2025 17:09:11 +0000 Subject: [PATCH 08/30] feat: changed join-streams path in browser.spec --- .../S3TransferManager.browser.spec.ts | 26 +++++++++++++++++-- .../S3TransferManager.spec.ts | 19 ++++++++++++++ .../join-streams.browser.ts | 2 +- .../src/s3-transfer-manager/join-streams.ts | 1 + 4 files changed, 45 insertions(+), 3 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.browser.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.browser.spec.ts index b8c9a5ff4ff8..e314b2efb2e8 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.browser.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.browser.spec.ts @@ -1,9 +1,8 @@ -// s3-transfer-manager.browser.spec.ts import { StreamingBlobPayloadOutputTypes } from "@smithy/types"; import { sdkStreamMixin } from "@smithy/util-stream"; import { describe, expect, it, vi } from "vitest"; -import { joinStreams } from "./join-streams"; +import { joinStreams } from "./join-streams.browser"; describe("join-streams tests", () => { const createReadableStreamWithContent = (content: Uint8Array) => @@ -78,6 +77,29 @@ describe("join-streams tests", () => { }); }); + it("should handle consecutive calls of joining multiple streams into a single stream", async () => { + for (let i = 0; i <= 3; i++) { + const contents = [ + new Uint8Array([67, 104, 117, 110, 107, 32, 49]), // "Chunk 1" + new Uint8Array([67, 104, 117, 110, 107, 32, 50]), // "Chunk 2" + new Uint8Array([67, 104, 117, 110, 107, 32, 51]), // "Chunk 3" + ]; + + const streams = 
contents.map((content) => + Promise.resolve(sdkStreamMixin(createWithContent(content)) as StreamingBlobPayloadOutputTypes) + ); + + const joinedStream = await joinStreams(streams); + + const chunks = await consume(joinedStream); + + expect(chunks.length).toBe(contents.length); + chunks.forEach((chunk, i) => { + expect(chunk).toEqual(contents[i]); + }); + } + }); + it("should handle streams with no data", async () => { const streams = [ Promise.resolve(sdkStreamMixin(createEmpty()) as StreamingBlobPayloadOutputTypes), diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts index 5ae9702023f8..adce0d265fe6 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts @@ -805,6 +805,25 @@ describe("join-streams tests", () => { }); }); + it("should handle consecutive calls of joining multiple streams into a single stream", async () => { + for (let i = 0; i <= 3; i++) { + const contents = [Buffer.from("Chunk 1"), Buffer.from("Chunk 2"), Buffer.from("Chunk 3")]; + + const streams = contents.map((content) => + Promise.resolve(createWithContent(content) as unknown as StreamingBlobPayloadOutputTypes) + ); + + const joinedStream = await joinStreams(streams); + + const chunks = await consume(joinedStream); + + const joinedContent = Buffer.isBuffer(chunks) ? 
chunks.toString() : Buffer.concat(chunks).toString(); + contents.forEach((content) => { + expect(joinedContent).toContain(content.toString()); + }); + } + }); + it("should handle streams with no data", async () => { const streams = [ Promise.resolve(createEmpty() as unknown as StreamingBlobPayloadOutputTypes), diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts index 3543d0dbb0a6..bd9eb18f6dbe 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts @@ -8,7 +8,6 @@ export async function joinStreams( eventListeners?: JoinStreamIterationEvents ): Promise { const firstStream = await streams[0]; - console.log("TESTING BROWSER VERSION"); if (isReadableStream(firstStream)) { const newReadableStream = new ReadableStream({ async start(controller) { @@ -33,6 +32,7 @@ export async function* iterateStreams( for (const streamPromise of streams) { const stream = await streamPromise; if (isReadableStream(stream)) { + // TODO: May need to acquire reader before reaching the stream const reader = stream.getReader(); try { while (true) { diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts index 56c18e292936..d0a796b920c4 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts @@ -34,6 +34,7 @@ export async function* iterateStreams( for (const streamPromise of streams) { const stream = await streamPromise; if (isReadableStream(stream)) { + // TODO: May need to acquire reader before reaching the stream const reader = stream.getReader(); try { while (true) { From c726213bb6f4b5bcec989bbf39425a4d7230c658 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Thu, 24 Jul 2025 19:13:45 +0000 Subject: [PATCH 09/30] feat(lib-storage): readme for S3TransferManager --- 
.../src/s3-transfer-manager/README.md | 136 ++++++++++++++++++ .../S3TransferManager.e2e.spec.ts | 6 +- .../s3-transfer-manager/S3TransferManager.ts | 30 ---- 3 files changed, 141 insertions(+), 31 deletions(-) create mode 100644 lib/lib-storage/src/s3-transfer-manager/README.md diff --git a/lib/lib-storage/src/s3-transfer-manager/README.md b/lib/lib-storage/src/s3-transfer-manager/README.md new file mode 100644 index 000000000000..c918df0e53b5 --- /dev/null +++ b/lib/lib-storage/src/s3-transfer-manager/README.md @@ -0,0 +1,136 @@ +# @aws-sdk/lib-storage/s3-transfer-manager + +## Overview + +S3TransferManager is a high level library that helps customers interact with S3 +for their most common use cases that involve multiple API operations through SDK JS V3. +S3TransferManager provides the following features: + +- automatic multipart upload to S3 +- automatic multipart download from S3 +- upload all files in a directory to an S3 bucket recursively or non-recursively +- download all objects in a bucket to a local directory recursively or non-recursively +- transfer progress listener + +## Installation + +## Getting Started + +## Configuration + +When creating an instance of the S3TransferManager, users can configure some of its client options +to best fit their use case. + +- s3ClientInstance - specify the low level S3 client that will be used to send requests to S3 +- targetPartSizeBytes - specify the target part size to use in multipart transfer. Does not + apply to the last part and downloads if multipartDownloadType is PART +- multipartUploadThresholdBytes - specify the size threshold in bytes for multipart upload. +- checksumValidationEnabled - option to disable checksum validation for download. +- multipartDownloadType - specify how the SDK should perform multipart download. Either RANGE or PART. +- eventListeners - transfer progress listeners to receive event-driven updates on transfer + progress throughout the lifecycle of a request at client level. 
Supported callbacks: + - transferInitiated: A new transfer has been initiated. This method is invoked exactly once per + transfer, right after the operation has started. It allows users to retrieve the request and ProgressSnapshot. + - bytesTransferred: Additional bytes have been submitted or received. This method may be called + many times per transfer, depending on the transfer size and I/O buffer sizes. It must be called + at least once for a successful transfer. It allows users to retrieve the request and the ProgressSnapshot. + - transferComplete: The transfer has completed successfully. This method is called exactly once for + a successful transfer. It allows users to retrieve the request, the response and the ProgressSnapshot. + - transferFailed: The transfer has failed. This method is called exactly once for a failed transfer. + It allows users to retrieve the request and a progress snapshot. + +### Example + +```js +import { S3Client } from "@aws-sdk/client-s3"; +import { S3TransferManager } from "@aws-sdk/lib-storage"; + + const tm = new S3TransferManager ({ + s3ClientInstance: new S3Client({}), + multipartDownloadType: "RANGE", + targetPartSizeBytes: 8 * 1024 * 1024, + multipartUploadThresholdBytes: 16 * 1024 * 1024, + checksumValidationEnabled: true, + checksumAlgorithm: CRC32, + eventListeners: { + transferInitiated: [transferStarted], + bytesTransferred: [progressBar], + transferComplete: [{ + handleEvent: console.log({ + request, snapshot, response + }) + }], + transferFailed: [transferFailed] + } + }) +``` + +### Constructor Options + +## API Reference + +## Methods + +### upload() + +### download() + +The download() function in S3TransferManager is a wrapper function for the S3 GetObjectCommand +allowing users to download objects from an S3 bucket using multipart download of two types +which are specified in the configuration of the S3TransferManager instance: Part GET and Ranged GET.
+Both of which download the object using GetObjectCommand in separate streams then join them into +one single stream. The S3TransferManager download() supports Readable and ReadableStream for node and browser. + +- Part GET + - Use case: Optimizes downloads for objects that were uploaded using the S3 multipart upload + - How it works: Uses the S3 native download feature with the PartNumber parameter. It fetches part 1 of the object to get the metadata then downloads the remaining parts concurrently. +- Range GET + - Use case: Allows for multipart download for any S3 object regardless of whether it was + uploaded using multipart upload or not + - How it works: Uses the HTTP Range request with the bytes=start-end headers to split objects into + chunks based on the user-provided byte range header, or if not included the MIN_PART_SIZE to make concurrent range requests. + +Users can also include an abortController allowing for cancellation mid download along +with eventListeners for the callbacks: 'transferInitiated', 'bytesTransferred', 'transferComplete', +and 'transferFailed' at client level and request level. 'bytesTransferred' provides progress updates per byte chunk during streaming. + +#### Validation + +Both multipartDownloadTypes have methods that validate the bytes and ranges of the multipart download requests. In multipartDownloadType PART, bytes of the part boundaries in each concurrent request are checked for whether they match the expected byte boundaries. In multipartDownloadType RANGE, the byte ranges are checked for whether they match the expected ranges. An error is thrown on mismatches and all requests for the download are cancelled. + +Both PART and RANGE GET use the S3 standard IfMatch header with the initial ETag for subsequent parts to ensure object version consistency during a download.
+ +#### uploadAll() + +#### downloadAll() + +### Event Handling + +#### addEventListener() + +#### removeEventListener() + +#### dispatchEvent() + +## Transfer Options + +### AbortSignal + +### Event Listeners + +## Examples + +### Basic Upload + +### Basic Download + +### Multipart Download + +### Event Handling + +### Abort Operations + +## Performance Considerations + +## Error Handling diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index 879fd2f6e9e0..6b7c08762aef 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -53,8 +53,12 @@ describe(S3TransferManager.name, () => { describe("multi part download", () => { const modes = ["PART", "RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; - const sizes = [6, 11] as number[]; + // 6 = 1 part, 11 = 2 part, 19 = 3 part + const sizes = [6, 11, 19] as number[]; + // TODO: eventListener callback tests - transferInitiated, bytesTransferred, transferComplete + // TODO: Integration test for transferFailed + // TODO: Write README, think in customer perspective, then based on that write e2e tests for (const mode of modes) { for (const size of sizes) { it(`should download an object of size ${size} with mode ${mode}`, async () => { diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index 6cf2f4924bd3..457676c204c6 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -214,36 +214,6 @@ export class S3TransferManager implements IS3TransferManager { throw new Error("Method not implemented."); } - /** - * What is missing from the revised SEP and this implementation currently? 
- * PART mode: - * - (DONE) Step 5: validate GetObject response for each part - * - If validation fails at any point, cancel all ongoing requests and error out - * - Step 6: after all requests have been sent, validate that the total number of part GET requests sent matches with the - * expected `PartsCount` - * - Step 7: when creating DownloadResponse, set accordingly: - * - (DONE) `ContentLength` : total length of the object saved from Step 3 - * - (DONE) `ContentRange`: based on `bytes 0-(ContentLength -1)/ContentLength` - * - If ChecksumType is `COMPOSITE`, set all checksum value members to null as - * the checksum value returned from a part GET request is not the composite - * checksum for the entire object - * RANGE mode: - * - (DONE) Step 7: validate GetObject response for each part. If validation fails or a - * request fails at any point, cancel all ongoing requests and return an error to - * the user. - * - Step 8: after all requests have sent, validate that the total number of ranged - * GET requests sent matches with the expected number saved from Step 5. - * - Step 9: create DownloadResponse. 
Copy the fields in GetObject response from - * Step 3 and set the following fields accordingly: - * - (DONE) `ContentLength` : total length of the object saved from Step 3 - * - (DONE) `ContentRange`: based on `bytes 0-(ContentLength -1)/ContentLength` - * - If ChecksumType is `COMPOSITE`, set all checksum value members to null as - * the checksum value returned from a part GET request is not the composite - * checksum for the entire object - * Checksum validation notes: - * - - * - */ public async download(request: DownloadRequest, transferOptions?: TransferOptions): Promise { const partNumber = request.PartNumber; if (typeof partNumber === "number") { From 3d26103494949757f318410f87f423dfd6052156 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Thu, 24 Jul 2025 21:11:44 +0000 Subject: [PATCH 10/30] feat(lib-storage): more expect statements in multipartdownload tests and etag tests --- .../S3TransferManager.e2e.spec.ts | 131 +++++++++++++++--- 1 file changed, 112 insertions(+), 19 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index 6b7c08762aef..1e07e48cf020 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -1,4 +1,5 @@ -import { S3 } from "@aws-sdk/client-s3"; +import { GetObjectCommandOutput, S3 } from "@aws-sdk/client-s3"; +import { getHeapSnapshot } from "v8"; import { beforeAll, describe, expect, test as it } from "vitest"; import { getIntegTestResources } from "../../../../tests/e2e/get-integ-test-resources"; @@ -51,7 +52,7 @@ describe(S3TransferManager.name, () => { }); }, 120_000); - describe("multi part download", () => { + describe.skip("multi part download", () => { const modes = ["PART", "RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; // 6 = 1 part, 11 = 2 part, 19 = 3 part const sizes = [6, 11, 19] as 
number[]; @@ -62,40 +63,59 @@ describe(S3TransferManager.name, () => { for (const mode of modes) { for (const size of sizes) { it(`should download an object of size ${size} with mode ${mode}`, async () => { - const Body = data(size * 1024 * 1024); + const totalSizeMB = size * 1024 * 1024; + const Body = data(totalSizeMB); const Key = `${mode}-size`; if (mode === "PART") { await new Upload({ client, - params: { - Bucket, - Key, - Body, - }, + params: { Bucket, Key, Body }, }).done(); } else { - await client.putObject({ - Bucket, - Key, - Body, - }); + await client.putObject({ Bucket, Key, Body }); } const tm: S3TransferManager = mode === "PART" ? tmPart : tmRange; - let bytesTransferred = 0; + const expectBasicTransfer = (request: any, snapshot: any) => { + expect(request.Bucket).toEqual(Bucket); + expect(request.Key).toEqual(Key); + expect(snapshot.totalBytes).toEqual(totalSizeMB); + }; + let bytesTransferred = 0; + let handleEventCalled = false; const download = await tm.download( - { - Bucket, - Key, - }, + { Bucket, Key }, { eventListeners: { + transferInitiated: [ + ({ request, snapshot }) => { + expectBasicTransfer(request, snapshot); + expect(snapshot.transferredBytes).toEqual(0); + }, + ], bytesTransferred: [ ({ request, snapshot }) => { + expectBasicTransfer(request, snapshot); bytesTransferred = snapshot.transferredBytes; + expect(snapshot.transferredBytes).toEqual(bytesTransferred); + }, + ], + transferComplete: [ + ({ request, snapshot, response }) => { + expectBasicTransfer(request, snapshot); + expect(snapshot.transferredBytes).toEqual(totalSizeMB); + expect(response.ETag).toBeDefined(); + expect((response as GetObjectCommandOutput).ContentLength).toEqual(totalSizeMB); + }, + { + handleEvent: (event: any) => { + handleEventCalled = true; + expect(event.request.Bucket).toEqual(Bucket); + expect(event.response).toBeDefined(); + }, }, ], }, @@ -104,13 +124,86 @@ describe(S3TransferManager.name, () => { const serialized = await 
download.Body?.transformToString(); check(serialized); + expect(download.ContentLength).toEqual(totalSizeMB); expect(bytesTransferred).toEqual(Body.length); + expect(handleEventCalled).toEqual(true); }, 60_000); } } }); - describe("(SEP) download single object tests", () => { + describe("error handling", () => { + const modes = ["PART", "RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; + + for (const mode of modes) { + it(`should fail when ETag changes during a ${mode} download`, async () => { + const totalSizeMB = 20 * 1024 * 1024; + const Body = data(totalSizeMB); + const Key = `${mode}-etag-test`; + + if (mode === "PART") { + await new Upload({ + client, + params: { Bucket, Key, Body }, + }).done(); + } else { + await client.putObject({ Bucket, Key, Body }); + } + + let transferFailed = false; + let objectUpdated = false; + + const tm: S3TransferManager = mode === "PART" ? tmPart : tmRange; + + // TODO: this test does not currently pass, fix mid-download logic or fix ETag verification in S3TM. 
+ try { + await tm.download( + { Bucket, Key }, + { + eventListeners: { + bytesTransferred: [ + async ({ snapshot }) => { + // Update object after first part is downloaded + if (!objectUpdated && snapshot.transferredBytes > 8 * 1024 * 1024) { + objectUpdated = true; + if (mode === "PART") { + await new Upload({ + client, + params: { + Bucket, + Key, + Body: "updated content", + }, + }).done(); + } else { + await client.putObject({ + Bucket, + Key, + Body: "updated content", + }); + } + } + }, + ], + transferFailed: [ + () => { + transferFailed = true; + }, + ], + }, + } + ); + expect.fail("Download should have failed due to ETag mismatch"); + } catch (error) { + console.log("Error:", error.name, error.message); + expect(transferFailed).toBe(true); + expect(error.name).toContain("PreconditionFailed"); + } + }, 60_000); + } + }); + + describe.skip("(SEP) download single object tests", () => { async function sepTests( objectType: "single" | "multipart", multipartType: "PART" | "RANGE", From c2d686334dcf5e6b9d527f24cef700e4b31108a3 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Fri, 25 Jul 2025 17:27:13 +0000 Subject: [PATCH 11/30] feat(lib-storage): added try catch and dispatches for transferFailedEvents --- .../S3TransferManager.e2e.spec.ts | 44 ++-- .../s3-transfer-manager/S3TransferManager.ts | 219 +++++++++++------- 2 files changed, 151 insertions(+), 112 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index 1e07e48cf020..42a491701bcf 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -1,4 +1,4 @@ -import { GetObjectCommandOutput, S3 } from "@aws-sdk/client-s3"; +import { GetObjectCommandOutput, PutObjectCommand, S3 } from "@aws-sdk/client-s3"; import { getHeapSnapshot } from "v8"; import { beforeAll, describe, expect, test as it } 
from "vitest"; @@ -52,14 +52,15 @@ describe(S3TransferManager.name, () => { }); }, 120_000); - describe.skip("multi part download", () => { + // TODO: eventListener callback tests - transferInitiated, bytesTransferred, transferComplete + // TODO: Integration test for transferFailed + // TODO: Write README, think in customer perspective, then based on that write e2e tests + + describe("multi part download", () => { const modes = ["PART", "RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; // 6 = 1 part, 11 = 2 part, 19 = 3 part - const sizes = [6, 11, 19] as number[]; + const sizes = [6, 11, 19, 0] as number[]; - // TODO: eventListener callback tests - transferInitiated, bytesTransferred, transferComplete - // TODO: Integration test for transferFailed - // TODO: Write README, think in customer perspective, then based on that write e2e tests for (const mode of modes) { for (const size of sizes) { it(`should download an object of size ${size} with mode ${mode}`, async () => { @@ -161,30 +162,21 @@ describe(S3TransferManager.name, () => { { Bucket, Key }, { eventListeners: { - bytesTransferred: [ + transferInitiated: [ async ({ snapshot }) => { // Update object after first part is downloaded - if (!objectUpdated && snapshot.transferredBytes > 8 * 1024 * 1024) { - objectUpdated = true; - if (mode === "PART") { - await new Upload({ - client, - params: { - Bucket, - Key, - Body: "updated content", - }, - }).done(); - } else { - await client.putObject({ - Bucket, - Key, - Body: "updated content", - }); - } - } + objectUpdated = true; + const objectUpload = await client.send( + new PutObjectCommand({ + Bucket, + Key: "6mb", + Body: data(2 * 1024 * 1024), + }) + ); + console.log(objectUpload.ETag); }, ], + bytesTransferred: [], transferFailed: [ () => { transferFailed = true; diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index 457676c204c6..6faac8d2d502 100644 --- 
a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -5,6 +5,7 @@ import type { PutObjectCommandInput, } from "@aws-sdk/client-s3"; import { GetObjectCommand, HeadObjectCommand, S3Client } from "@aws-sdk/client-s3"; +import { CONFIG_RESPONSE_CHECKSUM_VALIDATION } from "@aws-sdk/middleware-flexible-checksums/dist-types"; import { getChecksum } from "@aws-sdk/middleware-flexible-checksums/dist-types/getChecksum"; import { type StreamingBlobPayloadOutputTypes, Checksum, ChecksumConstructor } from "@smithy/types"; @@ -344,75 +345,89 @@ export class S3TransferManager implements IS3TransferManager { ...request, PartNumber: 1, }; - const initialPart = await this.s3ClientInstance.send(new GetObjectCommand(initialPartRequest), transferOptions); - const initialETag = initialPart.ETag ?? undefined; - const partSize = initialPart.ContentLength; - totalSize = initialPart.ContentRange ? Number.parseInt(initialPart.ContentRange.split("/")[1]) : undefined; - this.dispatchTransferInitiatedEvent(request, totalSize); - if (initialPart.Body) { - if (initialPart.Body && typeof (initialPart.Body as any).getReader === "function") { - const reader = (initialPart.Body as any).getReader(); - (initialPart.Body as any).getReader = function () { - return reader; - }; + try { + const initialPart = await this.s3ClientInstance.send(new GetObjectCommand(initialPartRequest), transferOptions); + const initialETag = initialPart.ETag ?? undefined; + const partSize = initialPart.ContentLength; + totalSize = initialPart.ContentRange ? 
Number.parseInt(initialPart.ContentRange.split("/")[1]) : undefined; + this.dispatchTransferInitiatedEvent(request, totalSize); + if (initialPart.Body) { + if (initialPart.Body && typeof (initialPart.Body as any).getReader === "function") { + const reader = (initialPart.Body as any).getReader(); + (initialPart.Body as any).getReader = function () { + return reader; + }; + } + streams.push(Promise.resolve(initialPart.Body)); + requests.push(initialPartRequest); } - streams.push(Promise.resolve(initialPart.Body)); - requests.push(initialPartRequest); - } - this.updateResponseLengthAndRange(initialPart, totalSize); - this.assignMetadata(metadata, initialPart); - this.updateChecksumValues(initialPart, metadata); - - let partCount = 1; - if (initialPart.PartsCount! > 1) { - for (let part = 2; part <= initialPart.PartsCount!; part++) { - this.checkAborted(transferOptions); - const getObjectRequest = { - ...request, - PartNumber: part, - IfMatch: !request.VersionId ? initialETag : undefined, - }; - const getObject = this.s3ClientInstance - .send(new GetObjectCommand(getObjectRequest), transferOptions) - .then((response) => { - this.validatePartDownload(response.ContentRange, part, partSize ?? 0); - if (response.Body && typeof (response.Body as any).getReader === "function") { - const reader = (response.Body as any).getReader(); - (response.Body as any).getReader = function () { - return reader; - }; - } - return response.Body!; - }); - - streams.push(getObject); - requests.push(getObjectRequest); - partCount++; - } - if (partCount !== initialPart.PartsCount) { - throw new Error( - `The number of parts downloaded (${partCount}) does not match the expected number (${initialPart.PartsCount})` - ); + this.updateResponseLengthAndRange(initialPart, totalSize); + this.assignMetadata(metadata, initialPart); + this.updateChecksumValues(initialPart, metadata); + + let partCount = 1; + if (initialPart.PartsCount! 
> 1) { + for (let part = 2; part <= initialPart.PartsCount!; part++) { + this.checkAborted(transferOptions); + const getObjectRequest = { + ...request, + PartNumber: part, + IfMatch: initialETag, + }; + const getObject = this.s3ClientInstance + .send(new GetObjectCommand(getObjectRequest), transferOptions) + .then((response) => { + this.validatePartDownload(response.ContentRange, part, partSize ?? 0); + if (response.Body && typeof (response.Body as any).getReader === "function") { + const reader = (response.Body as any).getReader(); + (response.Body as any).getReader = function () { + return reader; + }; + } + return response.Body!; + }) + .catch((error) => { + this.dispatchTransferFailedEvent(getObjectRequest, totalSize, error as Error); + throw error; + }); + streams.push(getObject); + requests.push(getObjectRequest); + partCount++; + } + if (partCount !== initialPart.PartsCount) { + throw new Error( + `The number of parts downloaded (${partCount}) does not match the expected number (${initialPart.PartsCount})` + ); + } } + } catch (error) { + this.dispatchTransferFailedEvent(request, totalSize, error); + throw error; } } else { this.checkAborted(transferOptions); - const getObjectRequest = { - ...request, - }; - const getObject = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); - totalSize = getObject.ContentRange ? Number.parseInt(getObject.ContentRange.split("/")[1]) : undefined; + try { + const getObjectRequest = { + ...request, + }; - this.dispatchTransferInitiatedEvent(request, totalSize); - if (getObject.Body) { - streams.push(Promise.resolve(getObject.Body)); - requests.push(getObjectRequest); + const getObject = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); + totalSize = getObject.ContentRange ? 
Number.parseInt(getObject.ContentRange.split("/")[1]) : undefined; + + this.dispatchTransferInitiatedEvent(request, totalSize); + if (getObject.Body) { + streams.push(Promise.resolve(getObject.Body)); + requests.push(getObjectRequest); + } + this.updateResponseLengthAndRange(getObject, totalSize); + this.assignMetadata(metadata, getObject); + this.updateChecksumValues(getObject, metadata); + } catch (error) { + this.dispatchTransferFailedEvent(request, undefined, error); + throw error; } - this.updateResponseLengthAndRange(getObject, totalSize); - this.assignMetadata(metadata, getObject); - this.updateChecksumValues(getObject, metadata); } return { @@ -441,16 +456,40 @@ export class S3TransferManager implements IS3TransferManager { left = userRangeLeft; right = Math.min(userRangeRight, left + S3TransferManager.MIN_PART_SIZE - 1); } - const getObjectRequest: GetObjectCommandInput = { - ...request, - Range: `bytes=${left}-${right}`, - }; - const initialRangeGet = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); - this.validateRangeDownload(`bytes=${left}-${right}`, initialRangeGet.ContentRange); - const initialETag = initialRangeGet.ETag ?? undefined; - const totalSize = initialRangeGet.ContentRange - ? Number.parseInt(initialRangeGet.ContentRange.split("/")[1]) - : undefined; + + let totalSize: number | undefined; + let initialETag: string | undefined; + + try { + const getObjectRequest: GetObjectCommandInput = { + ...request, + Range: `bytes=${left}-${right}`, + }; + const initialRangeGet = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); + this.validateRangeDownload(`bytes=${left}-${right}`, initialRangeGet.ContentRange); + initialETag = initialRangeGet.ETag ?? undefined; + totalSize = initialRangeGet.ContentRange + ? 
Number.parseInt(initialRangeGet.ContentRange.split("/")[1]) + : undefined; + + if (initialRangeGet.Body && typeof (initialRangeGet.Body as any).getReader === "function") { + const reader = (initialRangeGet.Body as any).getReader(); + (initialRangeGet.Body as any).getReader = function () { + return reader; + }; + } + + this.dispatchTransferInitiatedEvent(request, totalSize); + streams.push(Promise.resolve(initialRangeGet.Body!)); + requests.push(getObjectRequest); + + this.updateResponseLengthAndRange(initialRangeGet, totalSize); + this.assignMetadata(metadata, initialRangeGet); + this.updateChecksumValues(initialRangeGet, metadata); + } catch (error) { + this.dispatchTransferFailedEvent(request, totalSize, error as Error); + throw error; + } let expectedRequestCount = 1; if (totalSize) { @@ -460,25 +499,11 @@ export class S3TransferManager implements IS3TransferManager { expectedRequestCount += additionalRequests; } - if (initialRangeGet.Body && typeof (initialRangeGet.Body as any).getReader === "function") { - const reader = (initialRangeGet.Body as any).getReader(); - (initialRangeGet.Body as any).getReader = function () { - return reader; - }; - } - - this.dispatchTransferInitiatedEvent(request, totalSize); - streams.push(Promise.resolve(initialRangeGet.Body!)); - requests.push(getObjectRequest); - - this.updateResponseLengthAndRange(initialRangeGet, totalSize); - this.assignMetadata(metadata, initialRangeGet); - this.updateChecksumValues(initialRangeGet, metadata); - left = right + 1; right = Math.min(left + S3TransferManager.MIN_PART_SIZE - 1, maxRange); remainingLength = totalSize ? Math.min(right - left + 1, Math.max(0, totalSize - left)) : 0; let actualRequestCount = 1; + while (remainingLength > 0) { this.checkAborted(transferOptions); @@ -486,7 +511,7 @@ export class S3TransferManager implements IS3TransferManager { const getObjectRequest: GetObjectCommandInput = { ...request, Range: range, - IfMatch: !request.VersionId ? 
initialETag : undefined, + IfMatch: initialETag, }; const getObject = this.s3ClientInstance .send(new GetObjectCommand(getObjectRequest), transferOptions) @@ -499,6 +524,10 @@ export class S3TransferManager implements IS3TransferManager { }; } return response.Body!; + }) + .catch((error) => { + this.dispatchTransferFailedEvent(getObjectRequest, totalSize, error); + throw error; }); streams.push(getObject); @@ -587,6 +616,24 @@ export class S3TransferManager implements IS3TransferManager { return true; } + private dispatchTransferFailedEvent( + request: DownloadRequest | UploadRequest, + totalSize?: number, + error?: Error + ): boolean { + this.dispatchEvent( + Object.assign(new Event("transferFailed"), { + request, + error, + snapshot: { + transferredBytes: 0, + totalBytes: totalSize, + }, + }) + ); + return true; + } + private *iterateListeners(eventListeners: TransferEventListeners = {}) { for (const key in eventListeners) { const eventType = key as keyof TransferEventListeners; From 832e6d7c716bded5188fa8d313e888fa04932ccd Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Mon, 28 Jul 2025 03:47:09 +0000 Subject: [PATCH 12/30] feat: fixed unhandled promise rejection issue, added unit and e2e test for ETag --- .../S3TransferManager.e2e.spec.ts | 67 +++++----- .../S3TransferManager.spec.ts | 68 +++++++++++ .../s3-transfer-manager/S3TransferManager.ts | 64 ++++++++-- .../src/s3-transfer-manager/join-streams.ts | 10 ++ package.json | 1 + yarn.lock | 115 +++++++++++++++++- 6 files changed, 279 insertions(+), 46 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index 42a491701bcf..1ec97a3efe12 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -1,10 +1,16 @@ -import { GetObjectCommandOutput, PutObjectCommand, S3 } from "@aws-sdk/client-s3"; 
+import { + GetObjectCommandOutput, + ListBucketInventoryConfigurationsOutputFilterSensitiveLog, + PutObjectCommand, + S3, +} from "@aws-sdk/client-s3"; +import internal from "stream"; import { getHeapSnapshot } from "v8"; import { beforeAll, describe, expect, test as it } from "vitest"; import { getIntegTestResources } from "../../../../tests/e2e/get-integ-test-resources"; import { Upload } from "../Upload"; -import { S3TransferManager } from "./S3TransferManager"; +import { internalEventHandler, S3TransferManager } from "./S3TransferManager"; import type { IS3TransferManager, S3TransferManagerConfig } from "./types"; describe(S3TransferManager.name, () => { @@ -56,7 +62,7 @@ describe(S3TransferManager.name, () => { // TODO: Integration test for transferFailed // TODO: Write README, think in customer perspective, then based on that write e2e tests - describe("multi part download", () => { + describe.skip("multi part download", () => { const modes = ["PART", "RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; // 6 = 1 part, 11 = 2 part, 19 = 3 part const sizes = [6, 11, 19, 0] as number[]; @@ -68,14 +74,10 @@ describe(S3TransferManager.name, () => { const Body = data(totalSizeMB); const Key = `${mode}-size`; - if (mode === "PART") { - await new Upload({ - client, - params: { Bucket, Key, Body }, - }).done(); - } else { - await client.putObject({ Bucket, Key, Body }); - } + await new Upload({ + client, + params: { Bucket, Key, Body }, + }).done(); const tm: S3TransferManager = mode === "PART" ? 
tmPart : tmRange; @@ -137,7 +139,7 @@ describe(S3TransferManager.name, () => { const modes = ["PART", "RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; for (const mode of modes) { - it(`should fail when ETag changes during a ${mode} download`, async () => { + it(`should fail when ETag changes during a ${mode} download`, async () => { const totalSizeMB = 20 * 1024 * 1024; const Body = data(totalSizeMB); const Key = `${mode}-etag-test`; @@ -152,30 +154,30 @@ describe(S3TransferManager.name, () => { } let transferFailed = false; - let objectUpdated = false; - const tm: S3TransferManager = mode === "PART" ? tmPart : tmRange; - // TODO: this test does not currently pass, fix mid-download logic or fix ETag verification in S3TM. try { + internalEventHandler.afterInitialGetObject = async () => { + try { + if (mode === "PART") { + await new Upload({ + client, + params: { Bucket, Key, Body: data(20 * 1024 * 1024 - 8) }, + }).done(); + } else { + await client.putObject({ Bucket, Key, Body: data(20 * 1024 * 1024 - 8) }); + } + } catch (err) { + // ignore errors + } + internalEventHandler.afterInitialGetObject = async () => {}; + }; + await tm.download( { Bucket, Key }, { eventListeners: { - transferInitiated: [ - async ({ snapshot }) => { - // Update object after first part is downloaded - objectUpdated = true; - const objectUpload = await client.send( - new PutObjectCommand({ - Bucket, - Key: "6mb", - Body: data(2 * 1024 * 1024), - }) - ); - console.log(objectUpload.ETag); - }, - ], + transferInitiated: [], bytesTransferred: [], transferFailed: [ () => { @@ -187,15 +189,16 @@ describe(S3TransferManager.name, () => { ); expect.fail("Download should have failed due to ETag mismatch"); } catch (error) { - console.log("Error:", error.name, error.message); expect(transferFailed).toBe(true); - expect(error.name).toContain("PreconditionFailed"); + expect(error.name).toEqual("PreconditionFailed"); + } finally { + internalEventHandler.afterInitialGetObject = async () => 
{}; } }, 60_000); } }); - describe.skip("(SEP) download single object tests", () => { + describe("(SEP) download single object tests", () => { async function sepTests( objectType: "single" | "multipart", multipartType: "PART" | "RANGE", diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts index adce0d265fe6..aea4276ec361 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts @@ -1,6 +1,8 @@ import { S3, S3Client } from "@aws-sdk/client-s3"; +import { GetObjectCommand, PutObjectCommand } from "@aws-sdk/client-s3"; import { TransferCompleteEvent, TransferEvent } from "@aws-sdk/lib-storage/dist-types/s3-transfer-manager/types"; import { StreamingBlobPayloadOutputTypes } from "@smithy/types"; +import { mockClient } from "aws-sdk-client-mock"; import { Readable } from "stream"; import { beforeAll, beforeEach, describe, expect, test as it, vi } from "vitest"; @@ -41,6 +43,72 @@ describe("S3TransferManager Unit Tests", () => { responseChecksumValidation: "WHEN_REQUIRED", }); }); + + describe("ETag Unit tests", () => { + const s3Mock = mockClient(S3Client); + + beforeEach(() => { + s3Mock.reset(); + }); + + it("Should throw precondition error when ETag changes mid-download", async () => { + const bucket = "test-bucket"; + const key = "test-key"; + const originalData = Buffer.alloc(20 * 1024 * 1024, "a"); // 20MB + + let getCallCount = 0; + + s3Mock.on(GetObjectCommand).callsFake((input) => { + getCallCount++; + + if (getCallCount === 1) { + // First call - return original object with PartsCount > 1 to trigger concurrent requests + return { + Body: Readable.from([originalData.slice(0, 8 * 1024 * 1024)]), + ETag: '"original-etag"', + ContentLength: 8 * 1024 * 1024, + ContentRange: "bytes 0-8388607/20971520", // Part 1 of 3 parts + PartsCount: 3, + }; + } else { + // Subsequent calls with 
IfMatch should fail with 412 Precondition Failed + if (input.IfMatch === '"original-etag"') { + const error = new Error("The condition specified using HTTP conditional header(s) is not met."); + error.name = "PreconditionFailed"; + (error as any).$metadata = { + httpStatusCode: 412, + }; + throw error; + } + + // Fallback for any other calls + return { + Body: Readable.from([originalData.slice(0, 8 * 1024 * 1024)]), + ETag: '"original-etag"', + ContentLength: 8 * 1024 * 1024, + }; + } + }); + + const tm = new S3TransferManager({ + s3ClientInstance: new S3Client({}), + targetPartSizeBytes: 8 * 1024 * 1024, + multipartDownloadType: "PART", // Use PART mode to trigger the concurrent requests + }); + + await expect( + tm.download({ + Bucket: bucket, + Key: key, + }) + ).rejects.toThrowError( + expect.objectContaining({ + name: "PreconditionFailed", + }) + ); + }); + }); + describe("S3TransferManager Constructor", () => { it("Should create an instance of S3TransferManager with defaults given no parameters", () => { const tm = new S3TransferManager() as any; diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index 6faac8d2d502..05b079a86313 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -330,6 +330,7 @@ export class S3TransferManager implements IS3TransferManager { throw new Error("Method not implemented."); } + // TODO: fix case if object size is 0 bytes protected async downloadByPart( request: DownloadRequest, transferOptions: TransferOptions, @@ -348,6 +349,7 @@ export class S3TransferManager implements IS3TransferManager { try { const initialPart = await this.s3ClientInstance.send(new GetObjectCommand(initialPartRequest), transferOptions); const initialETag = initialPart.ETag ?? 
undefined; + await internalEventHandler.afterInitialGetObject(); const partSize = initialPart.ContentLength; totalSize = initialPart.ContentRange ? Number.parseInt(initialPart.ContentRange.split("/")[1]) : undefined; this.dispatchTransferInitiatedEvent(request, totalSize); @@ -368,6 +370,9 @@ export class S3TransferManager implements IS3TransferManager { let partCount = 1; if (initialPart.PartsCount! > 1) { + const concurrentRequests = []; + const concurrentRequestInputs = []; + for (let part = 2; part <= initialPart.PartsCount!; part++) { this.checkAborted(transferOptions); const getObjectRequest = { @@ -375,6 +380,7 @@ export class S3TransferManager implements IS3TransferManager { PartNumber: part, IfMatch: initialETag, }; + const getObject = this.s3ClientInstance .send(new GetObjectCommand(getObjectRequest), transferOptions) .then((response) => { @@ -386,15 +392,25 @@ export class S3TransferManager implements IS3TransferManager { }; } return response.Body!; - }) - .catch((error) => { - this.dispatchTransferFailedEvent(getObjectRequest, totalSize, error as Error); - throw error; }); - streams.push(getObject); - requests.push(getObjectRequest); + + concurrentRequests.push(getObject); + concurrentRequestInputs.push(getObjectRequest); partCount++; } + + try { + // Add promise streams to streams array ONLY if all are resolved + const responses = await Promise.all(concurrentRequests); + for (let i = 0; i < responses.length; i++) { + streams.push(Promise.resolve(responses[i])); + requests.push(concurrentRequestInputs[i]); + } + } catch (error) { + this.dispatchTransferFailedEvent(request, totalSize, error as Error); + throw error; + } + if (partCount !== initialPart.PartsCount) { throw new Error( `The number of parts downloaded (${partCount}) does not match the expected number (${initialPart.PartsCount})` @@ -435,6 +451,7 @@ export class S3TransferManager implements IS3TransferManager { }; } + // TODO: fix case if object size is 0 bytes protected async downloadByRange( 
request: DownloadRequest, transferOptions: TransferOptions, @@ -466,6 +483,7 @@ export class S3TransferManager implements IS3TransferManager { Range: `bytes=${left}-${right}`, }; const initialRangeGet = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); + await internalEventHandler.afterInitialGetObject(); this.validateRangeDownload(`bytes=${left}-${right}`, initialRangeGet.ContentRange); initialETag = initialRangeGet.ETag ?? undefined; totalSize = initialRangeGet.ContentRange @@ -504,6 +522,9 @@ export class S3TransferManager implements IS3TransferManager { remainingLength = totalSize ? Math.min(right - left + 1, Math.max(0, totalSize - left)) : 0; let actualRequestCount = 1; + const concurrentRequests = []; + const concurrentRequestInputs = []; + while (remainingLength > 0) { this.checkAborted(transferOptions); @@ -513,6 +534,7 @@ export class S3TransferManager implements IS3TransferManager { Range: range, IfMatch: initialETag, }; + const getObject = this.s3ClientInstance .send(new GetObjectCommand(getObjectRequest), transferOptions) .then((response) => { @@ -524,14 +546,10 @@ export class S3TransferManager implements IS3TransferManager { }; } return response.Body!; - }) - .catch((error) => { - this.dispatchTransferFailedEvent(getObjectRequest, totalSize, error); - throw error; }); - streams.push(getObject); - requests.push(getObjectRequest); + concurrentRequests.push(getObject); + concurrentRequestInputs.push(getObjectRequest); actualRequestCount++; left = right + 1; @@ -539,6 +557,20 @@ export class S3TransferManager implements IS3TransferManager { remainingLength = totalSize ? 
Math.min(right - left + 1, Math.max(0, totalSize - left)) : 0; } + if (concurrentRequests.length > 0) { + try { + // Add promise streams to streams array ONLY if all are resolved + const responses = await Promise.all(concurrentRequests); + for (let i = 0; i < responses.length; i++) { + streams.push(Promise.resolve(responses[i])); + requests.push(concurrentRequestInputs[i]); + } + } catch (error) { + this.dispatchTransferFailedEvent(request, totalSize, error as Error); + throw error; + } + } + if (expectedRequestCount !== actualRequestCount) { throw new Error( `The number of ranged GET requests sent (${actualRequestCount}) does not match the expected number (${expectedRequestCount})` @@ -708,3 +740,11 @@ export class S3TransferManager implements IS3TransferManager { throw new Error(`Expected range to end at ${expectedEnd} but got ${end}`); } } +/** + * + * + * @internal + */ +export const internalEventHandler = { + async afterInitialGetObject() {}, +}; diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts index d0a796b920c4..7aa50ed689fa 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts @@ -25,6 +25,15 @@ export async function joinStreams( } } +/** + * + * + * @internal + */ +export const internalEventHandler = { + async onStreamAvailable() {}, +}; + export async function* iterateStreams( streams: Promise[], eventListeners?: JoinStreamIterationEvents @@ -33,6 +42,7 @@ export async function* iterateStreams( let index = 0; for (const streamPromise of streams) { const stream = await streamPromise; + await internalEventHandler.onStreamAvailable(); if (isReadableStream(stream)) { // TODO: May need to acquire reader before reaching the stream const reader = stream.getReader(); diff --git a/package.json b/package.json index 2da8be494643..755f6550d735 100644 --- a/package.json +++ b/package.json @@ -79,6 +79,7 @@ 
"@typescript-eslint/eslint-plugin": "5.55.0", "@typescript-eslint/parser": "5.55.0", "async": "3.2.4", + "aws-sdk-client-mock": "^4.1.0", "concurrently": "7.0.0", "decomment": "0.9.5", "downlevel-dts": "0.10.1", diff --git a/yarn.lock b/yarn.lock index a7f38c463fad..318d03fd3ceb 100644 --- a/yarn.lock +++ b/yarn.lock @@ -28756,7 +28756,7 @@ __metadata: languageName: node linkType: hard -"@sinonjs/commons@npm:^3.0.0": +"@sinonjs/commons@npm:^3.0.0, @sinonjs/commons@npm:^3.0.1": version: 3.0.1 resolution: "@sinonjs/commons@npm:3.0.1" dependencies: @@ -28765,6 +28765,15 @@ __metadata: languageName: node linkType: hard +"@sinonjs/fake-timers@npm:11.2.2": + version: 11.2.2 + resolution: "@sinonjs/fake-timers@npm:11.2.2" + dependencies: + "@sinonjs/commons": "npm:^3.0.0" + checksum: 10c0/a4218efa6fdafda622d02d4c0a6ab7df3641cb038bb0b14f0a3ee56f50c95aab4f1ab2d7798ce928b40c6fc1839465a558c9393a77e4dca879e1b2f8d60d8136 + languageName: node + linkType: hard + "@sinonjs/fake-timers@npm:^10.0.2": version: 10.3.0 resolution: "@sinonjs/fake-timers@npm:10.3.0" @@ -28774,6 +28783,15 @@ __metadata: languageName: node linkType: hard +"@sinonjs/fake-timers@npm:^13.0.1": + version: 13.0.5 + resolution: "@sinonjs/fake-timers@npm:13.0.5" + dependencies: + "@sinonjs/commons": "npm:^3.0.1" + checksum: 10c0/a707476efd523d2138ef6bba916c83c4a377a8372ef04fad87499458af9f01afc58f4f245c5fd062793d6d70587309330c6f96947b5bd5697961c18004dc3e26 + languageName: node + linkType: hard + "@sinonjs/fake-timers@npm:^9.1.2": version: 9.1.2 resolution: "@sinonjs/fake-timers@npm:9.1.2" @@ -28783,6 +28801,23 @@ __metadata: languageName: node linkType: hard +"@sinonjs/samsam@npm:^8.0.0": + version: 8.0.3 + resolution: "@sinonjs/samsam@npm:8.0.3" + dependencies: + "@sinonjs/commons": "npm:^3.0.1" + type-detect: "npm:^4.1.0" + checksum: 10c0/9bf57a8f8a484b3455696786e1679db7f0d6017de62099ee304bd364281fcb20895b7c6b05292aa10fecf76df27691e914fc3e1cb8a56d88c027e87d869dcf0c + languageName: node + linkType: hard + 
+"@sinonjs/text-encoding@npm:^0.7.3": + version: 0.7.3 + resolution: "@sinonjs/text-encoding@npm:0.7.3" + checksum: 10c0/b112d1e97af7f99fbdc63c7dbcd35d6a60764dfec85cfcfff532e55cce8ecd8453f9fa2139e70aea47142c940fd90cd201d19f370b9a0141700d8a6de3116815 + languageName: node + linkType: hard + "@smithy/abort-controller@npm:^4.0.4": version: 4.0.4 resolution: "@smithy/abort-controller@npm:4.0.4" @@ -29780,6 +29815,22 @@ __metadata: languageName: node linkType: hard +"@types/sinon@npm:^17.0.3": + version: 17.0.4 + resolution: "@types/sinon@npm:17.0.4" + dependencies: + "@types/sinonjs__fake-timers": "npm:*" + checksum: 10c0/7c67ae1050d98a86d8dd771f0a764e97adb9d54812bf3b001195f8cfaa1e2bdfc725d5b970b91e7b0bb6b7c1ca209f47993f2c6f84f1f868313c37441313ca5b + languageName: node + linkType: hard + +"@types/sinonjs__fake-timers@npm:*": + version: 8.1.5 + resolution: "@types/sinonjs__fake-timers@npm:8.1.5" + checksum: 10c0/2b8bdc246365518fc1b08f5720445093cce586183acca19a560be6ef81f824bd9a96c090e462f622af4d206406dadf2033c5daf99a51c1096da6494e5c8dc32e + languageName: node + linkType: hard + "@types/stack-utils@npm:^2.0.0": version: 2.0.3 resolution: "@types/stack-utils@npm:2.0.3" @@ -31111,6 +31162,17 @@ __metadata: languageName: node linkType: hard +"aws-sdk-client-mock@npm:^4.1.0": + version: 4.1.0 + resolution: "aws-sdk-client-mock@npm:4.1.0" + dependencies: + "@types/sinon": "npm:^17.0.3" + sinon: "npm:^18.0.1" + tslib: "npm:^2.1.0" + checksum: 10c0/045caad0cff0ffeb08e69849dcae51aac8999163c58d71220bf47a82c237aabab2abf92bf6bf3bd7666e6e8984513c628e01a89eafa46fb230201d6587bc01e9 + languageName: node + linkType: hard + "aws-sdk-js-v3@workspace:.": version: 0.0.0-use.local resolution: "aws-sdk-js-v3@workspace:." 
@@ -31132,6 +31194,7 @@ __metadata: "@typescript-eslint/eslint-plugin": "npm:5.55.0" "@typescript-eslint/parser": "npm:5.55.0" async: "npm:3.2.4" + aws-sdk-client-mock: "npm:^4.1.0" concurrently: "npm:7.0.0" decomment: "npm:0.9.5" downlevel-dts: "npm:0.10.1" @@ -32801,6 +32864,13 @@ __metadata: languageName: node linkType: hard +"diff@npm:^5.2.0": + version: 5.2.0 + resolution: "diff@npm:5.2.0" + checksum: 10c0/aed0941f206fe261ecb258dc8d0ceea8abbde3ace5827518ff8d302f0fc9cc81ce116c4d8f379151171336caf0516b79e01abdc1ed1201b6440d895a66689eb4 + languageName: node + linkType: hard + "dir-glob@npm:^3.0.1": version: 3.0.1 resolution: "dir-glob@npm:3.0.1" @@ -36933,6 +37003,13 @@ __metadata: languageName: node linkType: hard +"just-extend@npm:^6.2.0": + version: 6.2.0 + resolution: "just-extend@npm:6.2.0" + checksum: 10c0/d41cbdb6d85b986d4deaf2144d81d4f7266cd408fc95189d046d63f610c2dc486b141aeb6ef319c2d76fe904d45a6bb31f19b098ff0427c35688e0c383fc0511 + languageName: node + linkType: hard + "jwa@npm:^1.4.1": version: 1.4.1 resolution: "jwa@npm:1.4.1" @@ -38117,6 +38194,19 @@ __metadata: languageName: node linkType: hard +"nise@npm:^6.0.0": + version: 6.1.1 + resolution: "nise@npm:6.1.1" + dependencies: + "@sinonjs/commons": "npm:^3.0.1" + "@sinonjs/fake-timers": "npm:^13.0.1" + "@sinonjs/text-encoding": "npm:^0.7.3" + just-extend: "npm:^6.2.0" + path-to-regexp: "npm:^8.1.0" + checksum: 10c0/09471adb738dc3be2981cc7815c90879ed6a5a3e162202ca66e12f9a5a0956bea718d0ec2f0c07acc26e3f958481b8fb30c30da76c13620e922f3b9dcd249c50 + languageName: node + linkType: hard + "no-case@npm:^3.0.4": version: 3.0.4 resolution: "no-case@npm:3.0.4" @@ -39006,6 +39096,13 @@ __metadata: languageName: node linkType: hard +"path-to-regexp@npm:^8.1.0": + version: 8.2.0 + resolution: "path-to-regexp@npm:8.2.0" + checksum: 10c0/ef7d0a887b603c0a142fad16ccebdcdc42910f0b14830517c724466ad676107476bba2fe9fffd28fd4c141391ccd42ea426f32bb44c2c82ecaefe10c37b90f5a + languageName: node + linkType: hard + 
"path-type@npm:^3.0.0": version: 3.0.0 resolution: "path-type@npm:3.0.0" @@ -40530,6 +40627,20 @@ __metadata: languageName: node linkType: hard +"sinon@npm:^18.0.1": + version: 18.0.1 + resolution: "sinon@npm:18.0.1" + dependencies: + "@sinonjs/commons": "npm:^3.0.1" + "@sinonjs/fake-timers": "npm:11.2.2" + "@sinonjs/samsam": "npm:^8.0.0" + diff: "npm:^5.2.0" + nise: "npm:^6.0.0" + supports-color: "npm:^7" + checksum: 10c0/c4554b8d9654d42fc4baefecd3b5ac42bcce73ad926d58521233d9c355dc2c1a0d73c55e5b2c929b6814e528cd9b54bc61096b9288579f9b284edd6e3d2da3df + languageName: node + linkType: hard + "sisteransi@npm:^1.0.5": version: 1.0.5 resolution: "sisteransi@npm:1.0.5" @@ -41025,7 +41136,7 @@ __metadata: languageName: node linkType: hard -"supports-color@npm:^7.0.0, supports-color@npm:^7.1.0": +"supports-color@npm:^7, supports-color@npm:^7.0.0, supports-color@npm:^7.1.0": version: 7.2.0 resolution: "supports-color@npm:7.2.0" dependencies: From 36553190511c3e3216cc910befbc9d591e6a8ebf Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Mon, 28 Jul 2025 15:08:15 +0000 Subject: [PATCH 13/30] feat: part and range handles 0 byte objects, added helper function for metadata assignment --- .../S3TransferManager.e2e.spec.ts | 82 ++++++++++++++++++- .../s3-transfer-manager/S3TransferManager.ts | 65 +++++++++------ 2 files changed, 122 insertions(+), 25 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index 1ec97a3efe12..ad85e5b3ac30 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -62,7 +62,7 @@ describe(S3TransferManager.name, () => { // TODO: Integration test for transferFailed // TODO: Write README, think in customer perspective, then based on that write e2e tests - describe.skip("multi part download", () => { + describe("multi part download", () => { 
const modes = ["PART", "RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; // 6 = 1 part, 11 = 2 part, 19 = 3 part const sizes = [6, 11, 19, 0] as number[]; @@ -128,6 +128,7 @@ describe(S3TransferManager.name, () => { check(serialized); expect(download.ContentLength).toEqual(totalSizeMB); + expect(download.ContentRange).toEqual(`bytes 0-${totalSizeMB - 1}/${totalSizeMB}`); expect(bytesTransferred).toEqual(Body.length); expect(handleEventCalled).toEqual(true); }, 60_000); @@ -135,6 +136,85 @@ describe(S3TransferManager.name, () => { } }); + /** + * TODO: RANGE multipartdownloadtype specific tests + * - Download object uploaded using multipart upload + * - Download with custom range + * - bytes=0-5242880 + * - bytes=0-10485760 + * - Download object uploaded using single part upload + * - Download with custom range + * - bytes=0-5242880 + * - bytes=0-10485760 + */ + describe.skip("RANGE tests", () => { + const sizes = [0] as number[]; + for (const size of sizes) { + it(`should download an object of size ${size} with mode RANGE`, async () => { + const totalSizeMB = size * 1024 * 1024; + const Body = data(totalSizeMB); + const Key = `RANGE-${size}`; + + await new Upload({ + client, + params: { Bucket, Key, Body }, + }).done(); + + const tm: S3TransferManager = tmRange; + + const expectBasicTransfer = (request: any, snapshot: any) => { + expect(request.Bucket).toEqual(Bucket); + expect(request.Key).toEqual(Key); + expect(snapshot.totalBytes).toEqual(totalSizeMB); + }; + + let bytesTransferred = 0; + let handleEventCalled = false; + const download = await tm.download( + { Bucket, Key }, + { + eventListeners: { + transferInitiated: [ + ({ request, snapshot }) => { + expectBasicTransfer(request, snapshot); + expect(snapshot.transferredBytes).toEqual(0); + }, + ], + bytesTransferred: [ + ({ request, snapshot }) => { + expectBasicTransfer(request, snapshot); + bytesTransferred = snapshot.transferredBytes; + 
expect(snapshot.transferredBytes).toEqual(bytesTransferred); + }, + ], + transferComplete: [ + ({ request, snapshot, response }) => { + expectBasicTransfer(request, snapshot); + expect(snapshot.transferredBytes).toEqual(totalSizeMB); + expect(response.ETag).toBeDefined(); + expect((response as GetObjectCommandOutput).ContentLength).toEqual(totalSizeMB); + }, + { + handleEvent: (event: any) => { + handleEventCalled = true; + expect(event.request.Bucket).toEqual(Bucket); + expect(event.response).toBeDefined(); + }, + }, + ], + }, + } + ); + const serialized = await download.Body?.transformToString(); + check(serialized); + + expect(download.ContentLength).toEqual(totalSizeMB); + expect(bytesTransferred).toEqual(Body.length); + expect(handleEventCalled).toEqual(true); + }, 60_000); + } + }); + describe("error handling", () => { const modes = ["PART", "RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index 05b079a86313..4a42ff2ea4dd 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -330,7 +330,6 @@ export class S3TransferManager implements IS3TransferManager { throw new Error("Method not implemented."); } - // TODO: fix case if object size is 0 bytes protected async downloadByPart( request: DownloadRequest, transferOptions: TransferOptions, @@ -351,7 +350,7 @@ export class S3TransferManager implements IS3TransferManager { const initialETag = initialPart.ETag ?? undefined; await internalEventHandler.afterInitialGetObject(); const partSize = initialPart.ContentLength; - totalSize = initialPart.ContentRange ? Number.parseInt(initialPart.ContentRange.split("/")[1]) : undefined; + totalSize = initialPart.ContentRange ? 
Number.parseInt(initialPart.ContentRange.split("/")[1]) : 0; this.dispatchTransferInitiatedEvent(request, totalSize); if (initialPart.Body) { if (initialPart.Body && typeof (initialPart.Body as any).getReader === "function") { @@ -364,9 +363,7 @@ export class S3TransferManager implements IS3TransferManager { requests.push(initialPartRequest); } - this.updateResponseLengthAndRange(initialPart, totalSize); - this.assignMetadata(metadata, initialPart); - this.updateChecksumValues(initialPart, metadata); + this.processResponseMetadata(initialPart, metadata, totalSize); let partCount = 1; if (initialPart.PartsCount! > 1) { @@ -430,16 +427,14 @@ export class S3TransferManager implements IS3TransferManager { }; const getObject = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); - totalSize = getObject.ContentRange ? Number.parseInt(getObject.ContentRange.split("/")[1]) : undefined; + totalSize = getObject.ContentRange ? Number.parseInt(getObject.ContentRange.split("/")[1]) : 0; this.dispatchTransferInitiatedEvent(request, totalSize); if (getObject.Body) { streams.push(Promise.resolve(getObject.Body)); requests.push(getObjectRequest); } - this.updateResponseLengthAndRange(getObject, totalSize); - this.assignMetadata(metadata, getObject); - this.updateChecksumValues(getObject, metadata); + this.processResponseMetadata(getObject, metadata, totalSize); } catch (error) { this.dispatchTransferFailedEvent(request, undefined, error); throw error; @@ -451,7 +446,6 @@ export class S3TransferManager implements IS3TransferManager { }; } - // TODO: fix case if object size is 0 bytes protected async downloadByRange( request: DownloadRequest, transferOptions: TransferOptions, @@ -461,6 +455,23 @@ export class S3TransferManager implements IS3TransferManager { ): Promise<{ totalSize: number | undefined }> { this.checkAborted(transferOptions); + const headResponse = await this.s3ClientInstance.send( + new HeadObjectCommand({ Bucket: request.Bucket, 
Key: request.Key }), + transferOptions + ); + + if (headResponse.ContentLength === 0) { + const getObjectRequest = { ...request }; + const response = await this.s3ClientInstance.send(new GetObjectCommand(getObjectRequest), transferOptions); + + this.dispatchTransferInitiatedEvent(request, 0); + if (response.Body) streams.push(Promise.resolve(response.Body)); + requests.push(getObjectRequest); + + this.processResponseMetadata(response, metadata, 0); + return { totalSize: 0 }; + } + let left = 0; let right = this.targetPartSizeBytes - 1; let maxRange = Number.POSITIVE_INFINITY; @@ -468,7 +479,6 @@ export class S3TransferManager implements IS3TransferManager { if (request.Range != null) { const [userRangeLeft, userRangeRight] = request.Range.replace("bytes=", "").split("-").map(Number); - maxRange = userRangeRight; left = userRangeLeft; right = Math.min(userRangeRight, left + S3TransferManager.MIN_PART_SIZE - 1); @@ -500,10 +510,7 @@ export class S3TransferManager implements IS3TransferManager { this.dispatchTransferInitiatedEvent(request, totalSize); streams.push(Promise.resolve(initialRangeGet.Body!)); requests.push(getObjectRequest); - - this.updateResponseLengthAndRange(initialRangeGet, totalSize); - this.assignMetadata(metadata, initialRangeGet); - this.updateChecksumValues(initialRangeGet, metadata); + this.processResponseMetadata(initialRangeGet, metadata, totalSize); } catch (error) { this.dispatchTransferFailedEvent(request, totalSize, error as Error); throw error; @@ -598,6 +605,15 @@ export class S3TransferManager implements IS3TransferManager { } } + private assignMetadata(container: any, response: any) { + for (const key in response) { + if (key === "Body") { + continue; + } + container[key] = response[key]; + } + } + private updateResponseLengthAndRange(response: DownloadResponse, totalSize: number | undefined): void { if (totalSize !== undefined) { response.ContentLength = totalSize; @@ -614,21 +630,22 @@ export class S3TransferManager implements 
IS3TransferManager { } } + private processResponseMetadata( + response: DownloadResponse, + metadata: Omit, + totalSize: number | undefined + ): void { + this.updateResponseLengthAndRange(response, totalSize); + this.assignMetadata(metadata, response); + this.updateChecksumValues(response, metadata); + } + private checkAborted(transferOptions?: TransferOptions): void { if (transferOptions?.abortSignal?.aborted) { throw Object.assign(new Error("Download aborted."), { name: "AbortError" }); } } - private assignMetadata(container: any, response: any) { - for (const key in response) { - if (key === "Body") { - continue; - } - container[key] = response[key]; - } - } - private validateConfig(): void { if (this.targetPartSizeBytes < S3TransferManager.MIN_PART_SIZE) { throw new Error(`targetPartSizeBytes must be at least ${S3TransferManager.MIN_PART_SIZE} bytes`); From 2559e6a032390dfb3242808c5a6d01386fba9069 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Mon, 28 Jul 2025 20:44:40 +0000 Subject: [PATCH 14/30] feat: custom range download tests and fixes --- .../S3TransferManager.e2e.spec.ts | 112 ++++++------------ .../S3TransferManager.spec.ts | 16 +-- .../s3-transfer-manager/S3TransferManager.ts | 15 ++- 3 files changed, 44 insertions(+), 99 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index ad85e5b3ac30..bf071c2452c8 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -58,10 +58,6 @@ describe(S3TransferManager.name, () => { }); }, 120_000); - // TODO: eventListener callback tests - transferInitiated, bytesTransferred, transferComplete - // TODO: Integration test for transferFailed - // TODO: Write README, think in customer perspective, then based on that write e2e tests - describe("multi part download", () => { const modes = ["PART", 
"RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; // 6 = 1 part, 11 = 2 part, 19 = 3 part @@ -136,82 +132,39 @@ describe(S3TransferManager.name, () => { } }); - /** - * TODO: RANGE multipartdownloadtype specific tests - * - Download object uploaded using multipart upload - * - Download with custom range - * - bytes=0-5242880 - * - bytes=0-10485760 - * - Download object uploaded using single part upload - * - Download with custom range - * - bytes=0-5242880 - * - bytes=0-10485760 - */ - describe.skip("RANGE tests", () => { - const sizes = [0] as number[]; - for (const size of sizes) { - it(`should download an object of size ${size} with mode RANGE`, async () => { - const totalSizeMB = size * 1024 * 1024; - const Body = data(totalSizeMB); - const Key = `RANGE-${size}`; - - await new Upload({ - client, - params: { Bucket, Key, Body }, - }).done(); + describe("RANGE tests", () => { + const uploadTypes = ["multipart", "single"] as const; + const ranges = ["bytes=0-5242879", "bytes=0-10485759"]; - const tm: S3TransferManager = tmRange; - - const expectBasicTransfer = (request: any, snapshot: any) => { - expect(request.Bucket).toEqual(Bucket); - expect(request.Key).toEqual(Key); - expect(snapshot.totalBytes).toEqual(totalSizeMB); - }; - - let bytesTransferred = 0; - let handleEventCalled = false; - const download = await tm.download( - { Bucket, Key }, - { - eventListeners: { - transferInitiated: [ - ({ request, snapshot }) => { - expectBasicTransfer(request, snapshot); - expect(snapshot.transferredBytes).toEqual(0); - }, - ], - bytesTransferred: [ - ({ request, snapshot }) => { - expectBasicTransfer(request, snapshot); - bytesTransferred = snapshot.transferredBytes; - expect(snapshot.transferredBytes).toEqual(bytesTransferred); - }, - ], - transferComplete: [ - ({ request, snapshot, response }) => { - expectBasicTransfer(request, snapshot); - expect(snapshot.transferredBytes).toEqual(totalSizeMB); - expect(response.ETag).toBeDefined(); - expect((response as 
GetObjectCommandOutput).ContentLength).toEqual(totalSizeMB); - }, - { - handleEvent: (event: any) => { - handleEventCalled = true; - expect(event.request.Bucket).toEqual(Bucket); - expect(event.response).toBeDefined(); - }, - }, - ], - }, + for (const uploadType of uploadTypes) { + for (const range of ranges) { + it(`should download ${uploadType} uploaded object with range ${range}`, async () => { + const totalSizeMB = 12 * 1024 * 1024; // 12MB + const Body = data(totalSizeMB); + const Key = `RANGE-${uploadType}-${range.replace(/[^0-9]/g, "")}`; + + // Upload based on type + if (uploadType === "multipart") { + await new Upload({ + client, + params: { Bucket, Key, Body }, + }).done(); + } else { + await client.putObject({ Bucket, Key, Body }); } - ); - const serialized = await download.Body?.transformToString(); - check(serialized); - expect(download.ContentLength).toEqual(totalSizeMB); - expect(bytesTransferred).toEqual(Body.length); - expect(handleEventCalled).toEqual(true); - }, 60_000); + const tm: S3TransferManager = tmRange; + const rangeEnd = parseInt(range.split("-")[1]); + const expectedBytes = rangeEnd + 1; + + const download = await tm.download({ Bucket, Key, Range: range }); + const serialized = await download.Body?.transformToString(); + check(serialized); + + expect(download.ContentLength).toEqual(expectedBytes); + expect(download.ContentRange).toEqual(`bytes 0-${rangeEnd}/${rangeEnd + 1}`); + }, 60_000); + } } }); @@ -278,7 +231,10 @@ describe(S3TransferManager.name, () => { } }); - describe("(SEP) download single object tests", () => { + // TODO: Write abortController tests + describe.skip("Download must cancel on timed abortController", () => {}); + + describe.skip("(SEP) download single object tests", () => { async function sepTests( objectType: "single" | "multipart", multipartType: "PART" | "RANGE", diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts 
index aea4276ec361..07fcbc44d5d1 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts @@ -10,18 +10,6 @@ import { getIntegTestResources } from "../../../../tests/e2e/get-integ-test-reso import { iterateStreams, joinStreams } from "./join-streams"; import { S3TransferManager } from "./S3TransferManager"; -/** - * Unit Tests: - * - addEventListener() - * - dispatchEvent() - * - removeEventListener() - * - TM Constructor - * - *iterateListeners() - * - joinStreams() - * - iterateStreams() - * - validateExpectedRanges() - */ - describe("S3TransferManager Unit Tests", () => { let client: S3; let Bucket: string; @@ -35,15 +23,13 @@ describe("S3TransferManager Unit Tests", () => { Bucket = process?.env?.AWS_SMOKE_TEST_BUCKET as string; void getIntegTestResources; - // region = "us-west-1"; - // Bucket = "lukachad-us-west-2"; - client = new S3({ region, responseChecksumValidation: "WHEN_REQUIRED", }); }); + // TODO: This test uses mock from public library aws-sdk-mock. 
May remove describe("ETag Unit tests", () => { const s3Mock = mockClient(S3Client); diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index 4a42ff2ea4dd..e56789bb1486 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -7,6 +7,7 @@ import type { import { GetObjectCommand, HeadObjectCommand, S3Client } from "@aws-sdk/client-s3"; import { CONFIG_RESPONSE_CHECKSUM_VALIDATION } from "@aws-sdk/middleware-flexible-checksums/dist-types"; import { getChecksum } from "@aws-sdk/middleware-flexible-checksums/dist-types/getChecksum"; +import { copySnapshotPresignedUrlMiddlewareOptions } from "@aws-sdk/middleware-sdk-ec2/dist-types"; import { type StreamingBlobPayloadOutputTypes, Checksum, ChecksumConstructor } from "@smithy/types"; import type { AddEventListenerOptions, EventListener, RemoveEventListenerOptions } from "./event-listener-types"; @@ -476,17 +477,17 @@ export class S3TransferManager implements IS3TransferManager { let right = this.targetPartSizeBytes - 1; let maxRange = Number.POSITIVE_INFINITY; let remainingLength = 1; + let totalSize: number | undefined; + let initialETag: string | undefined; if (request.Range != null) { const [userRangeLeft, userRangeRight] = request.Range.replace("bytes=", "").split("-").map(Number); maxRange = userRangeRight; left = userRangeLeft; right = Math.min(userRangeRight, left + S3TransferManager.MIN_PART_SIZE - 1); + totalSize = userRangeRight + 1; } - let totalSize: number | undefined; - let initialETag: string | undefined; - try { const getObjectRequest: GetObjectCommandInput = { ...request, @@ -496,9 +497,11 @@ export class S3TransferManager implements IS3TransferManager { await internalEventHandler.afterInitialGetObject(); this.validateRangeDownload(`bytes=${left}-${right}`, initialRangeGet.ContentRange); initialETag = initialRangeGet.ETag ?? 
undefined; - totalSize = initialRangeGet.ContentRange - ? Number.parseInt(initialRangeGet.ContentRange.split("/")[1]) - : undefined; + if (!totalSize) { + totalSize = initialRangeGet.ContentRange + ? Number.parseInt(initialRangeGet.ContentRange.split("/")[1]) + : undefined; + } if (initialRangeGet.Body && typeof (initialRangeGet.Body as any).getReader === "function") { const reader = (initialRangeGet.Body as any).getReader(); From 53c730f57209427d58c070e27b19cd7a17cccee5 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Tue, 29 Jul 2025 19:49:32 +0000 Subject: [PATCH 15/30] feat: fixing unhandled promise error --- .../S3TransferManager.e2e.spec.ts | 17 ++---- .../s3-transfer-manager/S3TransferManager.ts | 52 +++++-------------- .../src/s3-transfer-manager/join-streams.ts | 41 +++++++++++---- 3 files changed, 49 insertions(+), 61 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index bf071c2452c8..94a99af356b4 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -5,7 +5,6 @@ import { S3, } from "@aws-sdk/client-s3"; import internal from "stream"; -import { getHeapSnapshot } from "v8"; import { beforeAll, describe, expect, test as it } from "vitest"; import { getIntegTestResources } from "../../../../tests/e2e/get-integ-test-resources"; @@ -68,7 +67,7 @@ describe(S3TransferManager.name, () => { it(`should download an object of size ${size} with mode ${mode}`, async () => { const totalSizeMB = size * 1024 * 1024; const Body = data(totalSizeMB); - const Key = `${mode}-size`; + const Key = `${mode}-${size}`; await new Upload({ client, @@ -172,7 +171,7 @@ describe(S3TransferManager.name, () => { const modes = ["PART", "RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; for (const mode of modes) { - it(`should fail when ETag changes 
during a ${mode} download`, async () => { + it.only(`should fail when ETag changes during a ${mode} download`, async () => { const totalSizeMB = 20 * 1024 * 1024; const Body = data(totalSizeMB); const Key = `${mode}-etag-test`; @@ -206,7 +205,7 @@ describe(S3TransferManager.name, () => { internalEventHandler.afterInitialGetObject = async () => {}; }; - await tm.download( + const downloadResponse = await tm.download( { Bucket, Key }, { eventListeners: { @@ -220,6 +219,7 @@ describe(S3TransferManager.name, () => { }, } ); + await downloadResponse.Body?.transformToByteArray(); expect.fail("Download should have failed due to ETag mismatch"); } catch (error) { expect(transferFailed).toBe(true); @@ -234,7 +234,7 @@ describe(S3TransferManager.name, () => { // TODO: Write abortController tests describe.skip("Download must cancel on timed abortController", () => {}); - describe.skip("(SEP) download single object tests", () => { + describe("(SEP) download single object tests", () => { async function sepTests( objectType: "single" | "multipart", multipartType: "PART" | "RANGE", @@ -286,13 +286,6 @@ describe(S3TransferManager.name, () => { it("multipart object: multipartDownloadType = RANGE, range = 0-12MB, partNumber = null", async () => { await sepTests("multipart", "RANGE", `bytes=0-${12 * 1024 * 1024}`, undefined); }, 60_000); - // skipped because TM no longer supports partNumber - it.skip("single object: multipartDownloadType = PART, range = null, partNumber = 2", async () => { - await sepTests("single", "PART", undefined, 2); - }, 60_000); - it.skip("single object: multipartDownloadType = RANGE, range = null, partNumber = 2", async () => { - await sepTests("single", "RANGE", undefined, 2); - }, 60_000); it("single object: multipartDownloadType = PART, range = null, partNumber = null", async () => { await sepTests("single", "PART", undefined, undefined); }, 60_000); diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts 
b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index e56789bb1486..dc8fb3ffbe40 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -5,9 +5,6 @@ import type { PutObjectCommandInput, } from "@aws-sdk/client-s3"; import { GetObjectCommand, HeadObjectCommand, S3Client } from "@aws-sdk/client-s3"; -import { CONFIG_RESPONSE_CHECKSUM_VALIDATION } from "@aws-sdk/middleware-flexible-checksums/dist-types"; -import { getChecksum } from "@aws-sdk/middleware-flexible-checksums/dist-types/getChecksum"; -import { copySnapshotPresignedUrlMiddlewareOptions } from "@aws-sdk/middleware-sdk-ec2/dist-types"; import { type StreamingBlobPayloadOutputTypes, Checksum, ChecksumConstructor } from "@smithy/types"; import type { AddEventListenerOptions, EventListener, RemoveEventListenerOptions } from "./event-listener-types"; @@ -368,9 +365,6 @@ export class S3TransferManager implements IS3TransferManager { let partCount = 1; if (initialPart.PartsCount! 
> 1) { - const concurrentRequests = []; - const concurrentRequestInputs = []; - for (let part = 2; part <= initialPart.PartsCount!; part++) { this.checkAborted(transferOptions); const getObjectRequest = { @@ -390,25 +384,16 @@ export class S3TransferManager implements IS3TransferManager { }; } return response.Body!; + }) + .catch((error) => { + this.dispatchTransferFailedEvent(getObjectRequest, totalSize, error as Error); + throw error; }); - - concurrentRequests.push(getObject); - concurrentRequestInputs.push(getObjectRequest); + streams.push(getObject); + requests.push(getObjectRequest); partCount++; } - try { - // Add promise streams to streams array ONLY if all are resolved - const responses = await Promise.all(concurrentRequests); - for (let i = 0; i < responses.length; i++) { - streams.push(Promise.resolve(responses[i])); - requests.push(concurrentRequestInputs[i]); - } - } catch (error) { - this.dispatchTransferFailedEvent(request, totalSize, error as Error); - throw error; - } - if (partCount !== initialPart.PartsCount) { throw new Error( `The number of parts downloaded (${partCount}) does not match the expected number (${initialPart.PartsCount})` @@ -532,9 +517,6 @@ export class S3TransferManager implements IS3TransferManager { remainingLength = totalSize ? 
Math.min(right - left + 1, Math.max(0, totalSize - left)) : 0; let actualRequestCount = 1; - const concurrentRequests = []; - const concurrentRequestInputs = []; - while (remainingLength > 0) { this.checkAborted(transferOptions); @@ -556,10 +538,14 @@ export class S3TransferManager implements IS3TransferManager { }; } return response.Body!; + }) + .catch((error) => { + this.dispatchTransferFailedEvent(getObjectRequest, totalSize, error); + throw error; }); - concurrentRequests.push(getObject); - concurrentRequestInputs.push(getObjectRequest); + streams.push(getObject); + requests.push(getObjectRequest); actualRequestCount++; left = right + 1; @@ -567,20 +553,6 @@ export class S3TransferManager implements IS3TransferManager { remainingLength = totalSize ? Math.min(right - left + 1, Math.max(0, totalSize - left)) : 0; } - if (concurrentRequests.length > 0) { - try { - // Add promise streams to streams array ONLY if all are resolved - const responses = await Promise.all(concurrentRequests); - for (let i = 0; i < responses.length; i++) { - streams.push(Promise.resolve(responses[i])); - requests.push(concurrentRequestInputs[i]); - } - } catch (error) { - this.dispatchTransferFailedEvent(request, totalSize, error as Error); - throw error; - } - } - if (expectedRequestCount !== actualRequestCount) { throw new Error( `The number of ranged GET requests sent (${actualRequestCount}) does not match the expected number (${expectedRequestCount})` diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts index 7aa50ed689fa..eea61a7210e8 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts @@ -5,6 +5,9 @@ import { Readable } from "stream"; import { JoinStreamIterationEvents } from "./types"; // TODO: check all types. 
needs to join nodejs and browser together +/** + * @internal + */ export async function joinStreams( streams: Promise[], eventListeners?: JoinStreamIterationEvents @@ -26,14 +29,8 @@ export async function joinStreams( } /** - * - * * @internal */ -export const internalEventHandler = { - async onStreamAvailable() {}, -}; - export async function* iterateStreams( streams: Promise[], eventListeners?: JoinStreamIterationEvents @@ -41,10 +38,16 @@ export async function* iterateStreams( let bytesTransferred = 0; let index = 0; for (const streamPromise of streams) { - const stream = await streamPromise; - await internalEventHandler.onStreamAvailable(); + let stream: Awaited<(typeof streams)[0]>; + try { + stream = await streamPromise; + } catch (e) { + await destroy(streams); + eventListeners?.onFailure?.(e, index); + throw e; + } + if (isReadableStream(stream)) { - // TODO: May need to acquire reader before reaching the stream const reader = stream.getReader(); try { while (true) { @@ -75,3 +78,23 @@ export async function* iterateStreams( } eventListeners?.onCompletion?.(bytesTransferred, index - 1); } + +/** + * @internal + */ +async function destroy(streams: Promise[]): Promise { + await Promise.all( + streams.map(async (streamPromise) => { + return streamPromise + .then((stream) => { + if (stream instanceof Readable) { + stream.destroy(); + return; + } else if (isReadableStream(stream)) { + return stream.cancel(); + } + }) + .catch((e: unknown) => {}); + }) + ); +} From 53ef2605597375f507a4008ac87ba025d37c59f2 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Tue, 29 Jul 2025 20:24:53 +0000 Subject: [PATCH 16/30] feat: cr revisions --- lib/lib-storage/package.json | 3 +- .../S3TransferManager.e2e.spec.ts | 50 ++- .../S3TransferManager.spec.ts | 70 +--- .../s3-transfer-manager/S3TransferManager.ts | 5 +- lib/lib-storage/vitest.config.browser.ts | 2 +- package.json | 3 +- yarn.lock | 336 ++---------------- 7 files changed, 75 insertions(+), 394 deletions(-) diff --git 
a/lib/lib-storage/package.json b/lib/lib-storage/package.json index e86bbbcf5802..d246bbf7984b 100644 --- a/lib/lib-storage/package.json +++ b/lib/lib-storage/package.json @@ -15,10 +15,10 @@ "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo", "extract:docs": "api-extractor run --local", "test": "yarn g:vitest run", - "test:e2e": "yarn g:vitest run -c vitest.config.e2e.ts --mode development", "test:watch": "yarn g:vitest watch", "test:browser": "yarn g:vitest run -c vitest.config.browser.ts", "test:browser:watch": "yarn g:vitest watch -c vitest.config.browser.ts", + "test:e2e": "yarn g:vitest run -c vitest.config.e2e.ts --mode development", "test:e2e:watch": "yarn g:vitest watch -c vitest.config.e2e.ts" }, "engines": { @@ -48,7 +48,6 @@ "@types/node": "^18.19.69", "concurrently": "7.0.0", "downlevel-dts": "0.10.1", - "jsdom": "^26.1.0", "rimraf": "3.0.2", "typescript": "~5.8.3", "web-streams-polyfill": "3.2.1" diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index bf071c2452c8..0185b5d46448 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -1,17 +1,10 @@ -import { - GetObjectCommandOutput, - ListBucketInventoryConfigurationsOutputFilterSensitiveLog, - PutObjectCommand, - S3, -} from "@aws-sdk/client-s3"; -import internal from "stream"; -import { getHeapSnapshot } from "v8"; +import { GetObjectCommandOutput, S3 } from "@aws-sdk/client-s3"; import { beforeAll, describe, expect, test as it } from "vitest"; import { getIntegTestResources } from "../../../../tests/e2e/get-integ-test-resources"; import { Upload } from "../Upload"; import { internalEventHandler, S3TransferManager } from "./S3TransferManager"; -import type { IS3TransferManager, S3TransferManagerConfig } from "./types"; +import type { S3TransferManagerConfig } from "./types"; 
describe(S3TransferManager.name, () => { const chunk = "01234567"; @@ -231,10 +224,36 @@ describe(S3TransferManager.name, () => { } }); - // TODO: Write abortController tests - describe.skip("Download must cancel on timed abortController", () => {}); + describe("download with abortController ", () => { + const modes = ["PART"] as S3TransferManagerConfig["multipartDownloadType"][]; + for (const mode of modes) { + it(`should cancel ${mode} download on abort()`, async () => { + const totalSizeMB = 10 * 1024 * 1024; + const Body = data(totalSizeMB); + const Key = `${mode}-size`; + await new Upload({ + client, + params: { Bucket, Key, Body }, + }).done(); + const tm: S3TransferManager = mode === "PART" ? tmPart : tmRange; + const controller = new AbortController(); + setTimeout(() => controller.abort(), 100); + try { + await tm.download( + { Bucket, Key }, + { + abortSignal: controller.signal, + } + ); + expect.fail("Download should have been aborted"); + } catch (error) { + expect(error.name).toEqual("AbortError"); + } + }, 60_000); + } + }); - describe.skip("(SEP) download single object tests", () => { + describe("(SEP) download single object tests", () => { async function sepTests( objectType: "single" | "multipart", multipartType: "PART" | "RANGE", @@ -286,13 +305,6 @@ describe(S3TransferManager.name, () => { it("multipart object: multipartDownloadType = RANGE, range = 0-12MB, partNumber = null", async () => { await sepTests("multipart", "RANGE", `bytes=0-${12 * 1024 * 1024}`, undefined); }, 60_000); - // skipped because TM no longer supports partNumber - it.skip("single object: multipartDownloadType = PART, range = null, partNumber = 2", async () => { - await sepTests("single", "PART", undefined, 2); - }, 60_000); - it.skip("single object: multipartDownloadType = RANGE, range = null, partNumber = 2", async () => { - await sepTests("single", "RANGE", undefined, 2); - }, 60_000); it("single object: multipartDownloadType = PART, range = null, partNumber = null", async 
() => { await sepTests("single", "PART", undefined, undefined); }, 60_000); diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts index 07fcbc44d5d1..051e8dfc4bed 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts @@ -1,13 +1,11 @@ import { S3, S3Client } from "@aws-sdk/client-s3"; -import { GetObjectCommand, PutObjectCommand } from "@aws-sdk/client-s3"; import { TransferCompleteEvent, TransferEvent } from "@aws-sdk/lib-storage/dist-types/s3-transfer-manager/types"; import { StreamingBlobPayloadOutputTypes } from "@smithy/types"; -import { mockClient } from "aws-sdk-client-mock"; import { Readable } from "stream"; import { beforeAll, beforeEach, describe, expect, test as it, vi } from "vitest"; import { getIntegTestResources } from "../../../../tests/e2e/get-integ-test-resources"; -import { iterateStreams, joinStreams } from "./join-streams"; +import { joinStreams } from "./join-streams"; import { S3TransferManager } from "./S3TransferManager"; describe("S3TransferManager Unit Tests", () => { @@ -29,72 +27,6 @@ describe("S3TransferManager Unit Tests", () => { }); }); - // TODO: This test uses mock from public library aws-sdk-mock. 
May remove - describe("ETag Unit tests", () => { - const s3Mock = mockClient(S3Client); - - beforeEach(() => { - s3Mock.reset(); - }); - - it("Should throw precondition error when ETag changes mid-download", async () => { - const bucket = "test-bucket"; - const key = "test-key"; - const originalData = Buffer.alloc(20 * 1024 * 1024, "a"); // 20MB - - let getCallCount = 0; - - s3Mock.on(GetObjectCommand).callsFake((input) => { - getCallCount++; - - if (getCallCount === 1) { - // First call - return original object with PartsCount > 1 to trigger concurrent requests - return { - Body: Readable.from([originalData.slice(0, 8 * 1024 * 1024)]), - ETag: '"original-etag"', - ContentLength: 8 * 1024 * 1024, - ContentRange: "bytes 0-8388607/20971520", // Part 1 of 3 parts - PartsCount: 3, - }; - } else { - // Subsequent calls with IfMatch should fail with 412 Precondition Failed - if (input.IfMatch === '"original-etag"') { - const error = new Error("The condition specified using HTTP conditional header(s) is not met."); - error.name = "PreconditionFailed"; - (error as any).$metadata = { - httpStatusCode: 412, - }; - throw error; - } - - // Fallback for any other calls - return { - Body: Readable.from([originalData.slice(0, 8 * 1024 * 1024)]), - ETag: '"original-etag"', - ContentLength: 8 * 1024 * 1024, - }; - } - }); - - const tm = new S3TransferManager({ - s3ClientInstance: new S3Client({}), - targetPartSizeBytes: 8 * 1024 * 1024, - multipartDownloadType: "PART", // Use PART mode to trigger the concurrent requests - }); - - await expect( - tm.download({ - Bucket: bucket, - Key: key, - }) - ).rejects.toThrowError( - expect.objectContaining({ - name: "PreconditionFailed", - }) - ); - }); - }); - describe("S3TransferManager Constructor", () => { it("Should create an instance of S3TransferManager with defaults given no parameters", () => { const tm = new S3TransferManager() as any; diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts 
b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index e56789bb1486..d6c7b5ca5fd1 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -5,10 +5,7 @@ import type { PutObjectCommandInput, } from "@aws-sdk/client-s3"; import { GetObjectCommand, HeadObjectCommand, S3Client } from "@aws-sdk/client-s3"; -import { CONFIG_RESPONSE_CHECKSUM_VALIDATION } from "@aws-sdk/middleware-flexible-checksums/dist-types"; -import { getChecksum } from "@aws-sdk/middleware-flexible-checksums/dist-types/getChecksum"; -import { copySnapshotPresignedUrlMiddlewareOptions } from "@aws-sdk/middleware-sdk-ec2/dist-types"; -import { type StreamingBlobPayloadOutputTypes, Checksum, ChecksumConstructor } from "@smithy/types"; +import { type StreamingBlobPayloadOutputTypes } from "@smithy/types"; import type { AddEventListenerOptions, EventListener, RemoveEventListenerOptions } from "./event-listener-types"; import { joinStreams } from "./join-streams"; diff --git a/lib/lib-storage/vitest.config.browser.ts b/lib/lib-storage/vitest.config.browser.ts index 56f99838b736..2e6679a3d07e 100644 --- a/lib/lib-storage/vitest.config.browser.ts +++ b/lib/lib-storage/vitest.config.browser.ts @@ -3,6 +3,6 @@ import { defineConfig } from "vitest/config"; export default defineConfig({ test: { include: ["**/*.browser.spec.ts"], - environment: "jsdom", + environment: "happy-dom", }, }); diff --git a/package.json b/package.json index 755f6550d735..14563dee9522 100644 --- a/package.json +++ b/package.json @@ -79,7 +79,6 @@ "@typescript-eslint/eslint-plugin": "5.55.0", "@typescript-eslint/parser": "5.55.0", "async": "3.2.4", - "aws-sdk-client-mock": "^4.1.0", "concurrently": "7.0.0", "decomment": "0.9.5", "downlevel-dts": "0.10.1", @@ -96,7 +95,7 @@ "fs-extra": "^9.0.0", "generate-changelog": "^1.7.1", "glob": "7.1.6", - "happy-dom": "16.3.0", + "happy-dom": "^18.0.1", "husky": "^4.2.3", "jest": "29.7.0", 
"jmespath": "^0.15.0", diff --git a/yarn.lock b/yarn.lock index 318d03fd3ceb..6803bc684f8e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -15,19 +15,6 @@ __metadata: languageName: node linkType: hard -"@asamuzakjp/css-color@npm:^3.2.0": - version: 3.2.0 - resolution: "@asamuzakjp/css-color@npm:3.2.0" - dependencies: - "@csstools/css-calc": "npm:^2.1.3" - "@csstools/css-color-parser": "npm:^3.0.9" - "@csstools/css-parser-algorithms": "npm:^3.0.4" - "@csstools/css-tokenizer": "npm:^3.0.3" - lru-cache: "npm:^10.4.3" - checksum: 10c0/a4bf1c831751b1fae46b437e37e8a38c0b5bd58d23230157ae210bd1e905fe509b89b7c243e63d1522d852668a6292ed730a160e21342772b4e5b7b8ea14c092 - languageName: node - linkType: hard - "@aws-crypto/crc32@npm:5.2.0": version: 5.2.0 resolution: "@aws-crypto/crc32@npm:5.2.0" @@ -23765,7 +23752,6 @@ __metadata: concurrently: "npm:7.0.0" downlevel-dts: "npm:0.10.1" events: "npm:3.3.0" - jsdom: "npm:^26.1.0" rimraf: "npm:3.0.2" stream-browserify: "npm:3.0.0" tslib: "npm:^2.6.2" @@ -25590,52 +25576,6 @@ __metadata: languageName: node linkType: hard -"@csstools/color-helpers@npm:^5.0.2": - version: 5.0.2 - resolution: "@csstools/color-helpers@npm:5.0.2" - checksum: 10c0/bebaddb28b9eb58b0449edd5d0c0318fa88f3cb079602ee27e88c9118070d666dcc4e09a5aa936aba2fde6ba419922ade07b7b506af97dd7051abd08dfb2959b - languageName: node - linkType: hard - -"@csstools/css-calc@npm:^2.1.3, @csstools/css-calc@npm:^2.1.4": - version: 2.1.4 - resolution: "@csstools/css-calc@npm:2.1.4" - peerDependencies: - "@csstools/css-parser-algorithms": ^3.0.5 - "@csstools/css-tokenizer": ^3.0.4 - checksum: 10c0/42ce5793e55ec4d772083808a11e9fb2dfe36db3ec168713069a276b4c3882205b3507c4680224c28a5d35fe0bc2d308c77f8f2c39c7c09aad8747708eb8ddd8 - languageName: node - linkType: hard - -"@csstools/css-color-parser@npm:^3.0.9": - version: 3.0.10 - resolution: "@csstools/css-color-parser@npm:3.0.10" - dependencies: - "@csstools/color-helpers": "npm:^5.0.2" - "@csstools/css-calc": "npm:^2.1.4" - peerDependencies: - 
"@csstools/css-parser-algorithms": ^3.0.5 - "@csstools/css-tokenizer": ^3.0.4 - checksum: 10c0/8f8a2395b117c2f09366b5c9bf49bc740c92a65b6330fe3cc1e76abafd0d1000e42a657d7b0a3814846a66f1d69896142f7e36d7a4aca77de977e5cc5f944747 - languageName: node - linkType: hard - -"@csstools/css-parser-algorithms@npm:^3.0.4": - version: 3.0.5 - resolution: "@csstools/css-parser-algorithms@npm:3.0.5" - peerDependencies: - "@csstools/css-tokenizer": ^3.0.4 - checksum: 10c0/d9a1c888bd43849ae3437ca39251d5c95d2c8fd6b5ccdb7c45491dfd2c1cbdc3075645e80901d120e4d2c1993db9a5b2d83793b779dbbabcfb132adb142eb7f7 - languageName: node - linkType: hard - -"@csstools/css-tokenizer@npm:^3.0.3": - version: 3.0.4 - resolution: "@csstools/css-tokenizer@npm:3.0.4" - checksum: 10c0/3b589f8e9942075a642213b389bab75a2d50d05d203727fcdac6827648a5572674caff07907eff3f9a2389d86a4ee47308fafe4f8588f4a77b7167c588d2559f - languageName: node - linkType: hard - "@cucumber/ci-environment@npm:9.1.0": version: 9.1.0 resolution: "@cucumber/ci-environment@npm:9.1.0" @@ -29787,6 +29727,15 @@ __metadata: languageName: node linkType: hard +"@types/node@npm:^20.0.0": + version: 20.19.9 + resolution: "@types/node@npm:20.19.9" + dependencies: + undici-types: "npm:~6.21.0" + checksum: 10c0/c6738131f1698258a5ac1e0185e4fc56977f7f566cd0ee11167f93f2339478470257bd82c5e1908a936a204e0ad7996d741356a1a07c04997a236161ea23a874 + languageName: node + linkType: hard + "@types/normalize-package-data@npm:^2.4.0": version: 2.4.4 resolution: "@types/normalize-package-data@npm:2.4.4" @@ -29859,6 +29808,13 @@ __metadata: languageName: node linkType: hard +"@types/whatwg-mimetype@npm:^3.0.2": + version: 3.0.2 + resolution: "@types/whatwg-mimetype@npm:3.0.2" + checksum: 10c0/dad39d1e4abe760a0a963c84bbdbd26b1df0eb68aff83bdf6ecbb50ad781ead777f6906d19a87007790b750f7500a12e5624d31fc6a1529d14bd19b5c3a316d1 + languageName: node + linkType: hard + "@types/ws@npm:*": version: 8.5.13 resolution: "@types/ws@npm:8.5.13" @@ -31211,7 +31167,7 @@ __metadata: 
fs-extra: "npm:^9.0.0" generate-changelog: "npm:^1.7.1" glob: "npm:7.1.6" - happy-dom: "npm:16.3.0" + happy-dom: "npm:^18.0.1" husky: "npm:^4.2.3" jest: "npm:29.7.0" jmespath: "npm:^0.15.0" @@ -32563,16 +32519,6 @@ __metadata: languageName: node linkType: hard -"cssstyle@npm:^4.2.1": - version: 4.6.0 - resolution: "cssstyle@npm:4.6.0" - dependencies: - "@asamuzakjp/css-color": "npm:^3.2.0" - rrweb-cssom: "npm:^0.8.0" - checksum: 10c0/71add1b0ffafa1bedbef6855db6189b9523d3320e015a0bf3fbd504760efb9a81e1f1a225228d5fa892ee58e56d06994ca372e7f4e461cda7c4c9985fe075f65 - languageName: node - linkType: hard - "d@npm:1, d@npm:^1.0.1, d@npm:^1.0.2": version: 1.0.2 resolution: "d@npm:1.0.2" @@ -32599,16 +32545,6 @@ __metadata: languageName: node linkType: hard -"data-urls@npm:^5.0.0": - version: 5.0.0 - resolution: "data-urls@npm:5.0.0" - dependencies: - whatwg-mimetype: "npm:^4.0.0" - whatwg-url: "npm:^14.0.0" - checksum: 10c0/1b894d7d41c861f3a4ed2ae9b1c3f0909d4575ada02e36d3d3bc584bdd84278e20709070c79c3b3bff7ac98598cb191eb3e86a89a79ea4ee1ef360e1694f92ad - languageName: node - linkType: hard - "date-fns@npm:^2.16.1": version: 2.30.0 resolution: "date-fns@npm:2.30.0" @@ -32689,13 +32625,6 @@ __metadata: languageName: node linkType: hard -"decimal.js@npm:^10.5.0": - version: 10.6.0 - resolution: "decimal.js@npm:10.6.0" - checksum: 10c0/07d69fbcc54167a340d2d97de95f546f9ff1f69d2b45a02fd7a5292412df3cd9eb7e23065e532a318f5474a2e1bccf8392fdf0443ef467f97f3bf8cb0477e5aa - languageName: node - linkType: hard - "decomment@npm:0.9.5": version: 0.9.5 resolution: "decomment@npm:0.9.5" @@ -33122,13 +33051,6 @@ __metadata: languageName: node linkType: hard -"entities@npm:^6.0.0": - version: 6.0.1 - resolution: "entities@npm:6.0.1" - checksum: 10c0/ed836ddac5acb34341094eb495185d527bd70e8632b6c0d59548cbfa23defdbae70b96f9a405c82904efa421230b5b3fd2283752447d737beffd3f3e6ee74414 - languageName: node - linkType: hard - "env-paths@npm:^2.2.0": version: 2.2.1 resolution: "env-paths@npm:2.2.1" @@ 
-35004,16 +34926,6 @@ __metadata: languageName: node linkType: hard -"happy-dom@npm:16.3.0": - version: 16.3.0 - resolution: "happy-dom@npm:16.3.0" - dependencies: - webidl-conversions: "npm:^7.0.0" - whatwg-mimetype: "npm:^3.0.0" - checksum: 10c0/c90e29ff44818008aaae1fc65b276a9a1920455884fe895fdc634ced3f98a71fe81317ddf9a2ec1d7d07af0b300500d9f652ba7cfc144ed96c5f7c480edde83b - languageName: node - linkType: hard - "happy-dom@npm:^15.7.4": version: 15.11.7 resolution: "happy-dom@npm:15.11.7" @@ -35025,6 +34937,17 @@ __metadata: languageName: node linkType: hard +"happy-dom@npm:^18.0.1": + version: 18.0.1 + resolution: "happy-dom@npm:18.0.1" + dependencies: + "@types/node": "npm:^20.0.0" + "@types/whatwg-mimetype": "npm:^3.0.2" + whatwg-mimetype: "npm:^3.0.0" + checksum: 10c0/10f2115f5001fdaf1aedcbda89c15248a1c2e43a25d7e774cb641a35bf6763cef9097b438ef3c2248ab59a0ef33b3e88cb94da096f2bb0fc109ba3f43f7c66d4 + languageName: node + linkType: hard + "har-schema@npm:^2.0.0": version: 2.0.0 resolution: "har-schema@npm:2.0.0" @@ -35132,15 +35055,6 @@ __metadata: languageName: node linkType: hard -"html-encoding-sniffer@npm:^4.0.0": - version: 4.0.0 - resolution: "html-encoding-sniffer@npm:4.0.0" - dependencies: - whatwg-encoding: "npm:^3.1.1" - checksum: 10c0/523398055dc61ac9b34718a719cb4aa691e4166f29187e211e1607de63dc25ac7af52ca7c9aead0c4b3c0415ffecb17326396e1202e2e86ff4bca4c0ee4c6140 - languageName: node - linkType: hard - "html-escaper@npm:^2.0.0": version: 2.0.2 resolution: "html-escaper@npm:2.0.2" @@ -35179,7 +35093,7 @@ __metadata: languageName: node linkType: hard -"http-proxy-agent@npm:^7.0.0, http-proxy-agent@npm:^7.0.2": +"http-proxy-agent@npm:^7.0.0": version: 7.0.2 resolution: "http-proxy-agent@npm:7.0.2" dependencies: @@ -35217,7 +35131,7 @@ __metadata: languageName: node linkType: hard -"https-proxy-agent@npm:^7.0.1, https-proxy-agent@npm:^7.0.6": +"https-proxy-agent@npm:^7.0.1": version: 7.0.6 resolution: "https-proxy-agent@npm:7.0.6" dependencies: @@ -35280,7 
+35194,7 @@ __metadata: languageName: node linkType: hard -"iconv-lite@npm:0.6.3, iconv-lite@npm:^0.6.2": +"iconv-lite@npm:^0.6.2": version: 0.6.3 resolution: "iconv-lite@npm:0.6.3" dependencies: @@ -35627,13 +35541,6 @@ __metadata: languageName: node linkType: hard -"is-potential-custom-element-name@npm:^1.0.1": - version: 1.0.1 - resolution: "is-potential-custom-element-name@npm:1.0.1" - checksum: 10c0/b73e2f22bc863b0939941d369486d308b43d7aef1f9439705e3582bfccaa4516406865e32c968a35f97a99396dac84e2624e67b0a16b0a15086a785e16ce7db9 - languageName: node - linkType: hard - "is-promise@npm:^2.1.0": version: 2.2.2 resolution: "is-promise@npm:2.2.2" @@ -36815,39 +36722,6 @@ __metadata: languageName: node linkType: hard -"jsdom@npm:^26.1.0": - version: 26.1.0 - resolution: "jsdom@npm:26.1.0" - dependencies: - cssstyle: "npm:^4.2.1" - data-urls: "npm:^5.0.0" - decimal.js: "npm:^10.5.0" - html-encoding-sniffer: "npm:^4.0.0" - http-proxy-agent: "npm:^7.0.2" - https-proxy-agent: "npm:^7.0.6" - is-potential-custom-element-name: "npm:^1.0.1" - nwsapi: "npm:^2.2.16" - parse5: "npm:^7.2.1" - rrweb-cssom: "npm:^0.8.0" - saxes: "npm:^6.0.0" - symbol-tree: "npm:^3.2.4" - tough-cookie: "npm:^5.1.1" - w3c-xmlserializer: "npm:^5.0.0" - webidl-conversions: "npm:^7.0.0" - whatwg-encoding: "npm:^3.1.1" - whatwg-mimetype: "npm:^4.0.0" - whatwg-url: "npm:^14.1.1" - ws: "npm:^8.18.0" - xml-name-validator: "npm:^5.0.0" - peerDependencies: - canvas: ^3.0.0 - peerDependenciesMeta: - canvas: - optional: true - checksum: 10c0/5b14a5bc32ce077a06fb42d1ab95b1191afa5cbbce8859e3b96831c5143becbbcbf0511d4d4934e922d2901443ced2cdc3b734c1cf30b5f73b3e067ce457d0f4 - languageName: node - linkType: hard - "jsesc@npm:^3.0.2": version: 3.1.0 resolution: "jsesc@npm:3.1.0" @@ -37474,7 +37348,7 @@ __metadata: languageName: node linkType: hard -"lru-cache@npm:^10.0.1, lru-cache@npm:^10.2.0, lru-cache@npm:^10.4.3": +"lru-cache@npm:^10.0.1, lru-cache@npm:^10.2.0": version: 10.4.3 resolution: "lru-cache@npm:10.4.3" 
checksum: 10c0/ebd04fbca961e6c1d6c0af3799adcc966a1babe798f685bb84e6599266599cd95d94630b10262f5424539bc4640107e8a33aa28585374abf561d30d16f4b39fb @@ -38532,13 +38406,6 @@ __metadata: languageName: node linkType: hard -"nwsapi@npm:^2.2.16": - version: 2.2.20 - resolution: "nwsapi@npm:2.2.20" - checksum: 10c0/07f4dafa3186aef7c007863e90acd4342a34ba9d44b22f14f644fdb311f6086887e21c2fc15efaa826c2bc39ab2bc841364a1a630e7c87e0cb723ba59d729297 - languageName: node - linkType: hard - "nx@npm:15.9.7, nx@npm:>=14.6.1 < 16": version: 15.9.7 resolution: "nx@npm:15.9.7" @@ -39028,15 +38895,6 @@ __metadata: languageName: node linkType: hard -"parse5@npm:^7.2.1": - version: 7.3.0 - resolution: "parse5@npm:7.3.0" - dependencies: - entities: "npm:^6.0.0" - checksum: 10c0/7fd2e4e247e85241d6f2a464d0085eed599a26d7b0a5233790c49f53473232eb85350e8133344d9b3fd58b89339e7ad7270fe1f89d28abe50674ec97b87f80b5 - languageName: node - linkType: hard - "parseurl@npm:~1.3.3": version: 1.3.3 resolution: "parseurl@npm:1.3.3" @@ -40215,13 +40073,6 @@ __metadata: languageName: node linkType: hard -"rrweb-cssom@npm:^0.8.0": - version: 0.8.0 - resolution: "rrweb-cssom@npm:0.8.0" - checksum: 10c0/56f2bfd56733adb92c0b56e274c43f864b8dd48784d6fe946ef5ff8d438234015e59ad837fc2ad54714b6421384141c1add4eb569e72054e350d1f8a50b8ac7b - languageName: node - linkType: hard - "rsvp@npm:^4.8.4": version: 4.8.5 resolution: "rsvp@npm:4.8.5" @@ -40319,15 +40170,6 @@ __metadata: languageName: node linkType: hard -"saxes@npm:^6.0.0": - version: 6.0.0 - resolution: "saxes@npm:6.0.0" - dependencies: - xmlchars: "npm:^2.2.0" - checksum: 10c0/3847b839f060ef3476eb8623d099aa502ad658f5c40fd60c105ebce86d244389b0d76fcae30f4d0c728d7705ceb2f7e9b34bb54717b6a7dbedaf5dad2d9a4b74 - languageName: node - linkType: hard - "schema-utils@npm:^3.1.0": version: 3.3.0 resolution: "schema-utils@npm:3.3.0" @@ -41171,13 +41013,6 @@ __metadata: languageName: node linkType: hard -"symbol-tree@npm:^3.2.4": - version: 3.2.4 - resolution: 
"symbol-tree@npm:3.2.4" - checksum: 10c0/dfbe201ae09ac6053d163578778c53aa860a784147ecf95705de0cd23f42c851e1be7889241495e95c37cabb058edb1052f141387bef68f705afc8f9dd358509 - languageName: node - linkType: hard - "tapable@npm:^2.1.1, tapable@npm:^2.2.0": version: 2.2.1 resolution: "tapable@npm:2.2.1" @@ -41425,24 +41260,6 @@ __metadata: languageName: node linkType: hard -"tldts-core@npm:^6.1.86": - version: 6.1.86 - resolution: "tldts-core@npm:6.1.86" - checksum: 10c0/8133c29375f3f99f88fce5f4d62f6ecb9532b106f31e5423b27c1eb1b6e711bd41875184a456819ceaed5c8b94f43911b1ad57e25c6eb86e1fc201228ff7e2af - languageName: node - linkType: hard - -"tldts@npm:^6.1.32": - version: 6.1.86 - resolution: "tldts@npm:6.1.86" - dependencies: - tldts-core: "npm:^6.1.86" - bin: - tldts: bin/cli.js - checksum: 10c0/27ae7526d9d78cb97b2de3f4d102e0b4321d1ccff0648a7bb0e039ed54acbce86bacdcd9cd3c14310e519b457854e7bafbef1f529f58a1e217a737ced63f0940 - languageName: node - linkType: hard - "tmp@npm:^0.0.33": version: 0.0.33 resolution: "tmp@npm:0.0.33" @@ -41506,15 +41323,6 @@ __metadata: languageName: node linkType: hard -"tough-cookie@npm:^5.1.1": - version: 5.1.2 - resolution: "tough-cookie@npm:5.1.2" - dependencies: - tldts: "npm:^6.1.32" - checksum: 10c0/5f95023a47de0f30a902bba951664b359725597d8adeabc66a0b93a931c3af801e1e697dae4b8c21a012056c0ea88bd2bf4dfe66b2adcf8e2f42cd9796fe0626 - languageName: node - linkType: hard - "tough-cookie@npm:~2.5.0": version: 2.5.0 resolution: "tough-cookie@npm:2.5.0" @@ -41525,15 +41333,6 @@ __metadata: languageName: node linkType: hard -"tr46@npm:^5.1.0": - version: 5.1.1 - resolution: "tr46@npm:5.1.1" - dependencies: - punycode: "npm:^2.3.1" - checksum: 10c0/ae270e194d52ec67ebd695c1a42876e0f19b96e4aca2ab464ab1d9d17dc3acd3e18764f5034c93897db73421563be27c70c98359c4501136a497e46deda5d5ec - languageName: node - linkType: hard - "tr46@npm:~0.0.3": version: 0.0.3 resolution: "tr46@npm:0.0.3" @@ -41965,6 +41764,13 @@ __metadata: languageName: node linkType: hard 
+"undici-types@npm:~6.21.0": + version: 6.21.0 + resolution: "undici-types@npm:6.21.0" + checksum: 10c0/c01ed51829b10aa72fc3ce64b747f8e74ae9b60eafa19a7b46ef624403508a54c526ffab06a14a26b3120d055e1104d7abe7c9017e83ced038ea5cf52f8d5e04 + languageName: node + linkType: hard + "unique-filename@npm:^2.0.0": version: 2.0.1 resolution: "unique-filename@npm:2.0.1" @@ -42606,15 +42412,6 @@ __metadata: languageName: node linkType: hard -"w3c-xmlserializer@npm:^5.0.0": - version: 5.0.0 - resolution: "w3c-xmlserializer@npm:5.0.0" - dependencies: - xml-name-validator: "npm:^5.0.0" - checksum: 10c0/8712774c1aeb62dec22928bf1cdfd11426c2c9383a1a63f2bcae18db87ca574165a0fbe96b312b73652149167ac6c7f4cf5409f2eb101d9c805efe0e4bae798b - languageName: node - linkType: hard - "walk-up-path@npm:^1.0.0": version: 1.0.0 resolution: "walk-up-path@npm:1.0.0" @@ -42759,15 +42556,6 @@ __metadata: languageName: node linkType: hard -"whatwg-encoding@npm:^3.1.1": - version: 3.1.1 - resolution: "whatwg-encoding@npm:3.1.1" - dependencies: - iconv-lite: "npm:0.6.3" - checksum: 10c0/273b5f441c2f7fda3368a496c3009edbaa5e43b71b09728f90425e7f487e5cef9eb2b846a31bd760dd8077739c26faf6b5ca43a5f24033172b003b72cf61a93e - languageName: node - linkType: hard - "whatwg-mimetype@npm:^3.0.0": version: 3.0.0 resolution: "whatwg-mimetype@npm:3.0.0" @@ -42775,23 +42563,6 @@ __metadata: languageName: node linkType: hard -"whatwg-mimetype@npm:^4.0.0": - version: 4.0.0 - resolution: "whatwg-mimetype@npm:4.0.0" - checksum: 10c0/a773cdc8126b514d790bdae7052e8bf242970cebd84af62fb2f35a33411e78e981f6c0ab9ed1fe6ec5071b09d5340ac9178e05b52d35a9c4bcf558ba1b1551df - languageName: node - linkType: hard - -"whatwg-url@npm:^14.0.0, whatwg-url@npm:^14.1.1": - version: 14.2.0 - resolution: "whatwg-url@npm:14.2.0" - dependencies: - tr46: "npm:^5.1.0" - webidl-conversions: "npm:^7.0.0" - checksum: 10c0/f746fc2f4c906607d09537de1227b13f9494c171141e5427ed7d2c0dd0b6a48b43d8e71abaae57d368d0c06b673fd8ec63550b32ad5ed64990c7b0266c2b4272 - 
languageName: node - linkType: hard - "whatwg-url@npm:^5.0.0": version: 5.0.0 resolution: "whatwg-url@npm:5.0.0" @@ -43015,35 +42786,6 @@ __metadata: languageName: node linkType: hard -"ws@npm:^8.18.0": - version: 8.18.3 - resolution: "ws@npm:8.18.3" - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: ">=5.0.2" - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - checksum: 10c0/eac918213de265ef7cb3d4ca348b891a51a520d839aa51cdb8ca93d4fa7ff9f6ccb339ccee89e4075324097f0a55157c89fa3f7147bde9d8d7e90335dc087b53 - languageName: node - linkType: hard - -"xml-name-validator@npm:^5.0.0": - version: 5.0.0 - resolution: "xml-name-validator@npm:5.0.0" - checksum: 10c0/3fcf44e7b73fb18be917fdd4ccffff3639373c7cb83f8fc35df6001fecba7942f1dbead29d91ebb8315e2f2ff786b508f0c9dc0215b6353f9983c6b7d62cb1f5 - languageName: node - linkType: hard - -"xmlchars@npm:^2.2.0": - version: 2.2.0 - resolution: "xmlchars@npm:2.2.0" - checksum: 10c0/b64b535861a6f310c5d9bfa10834cf49127c71922c297da9d4d1b45eeaae40bf9b4363275876088fbe2667e5db028d2cd4f8ee72eed9bede840a67d57dab7593 - languageName: node - linkType: hard - "xtend@npm:^4.0.0, xtend@npm:^4.0.2, xtend@npm:~4.0.0, xtend@npm:~4.0.1": version: 4.0.2 resolution: "xtend@npm:4.0.2" From cf4964fb446b4a8f6b38fd8f1336f1812eeddda5 Mon Sep 17 00:00:00 2001 From: George Fu Date: Tue, 29 Jul 2025 16:47:35 -0400 Subject: [PATCH 17/30] chore: debug --- .../S3TransferManager.e2e.spec.ts | 11 +++-------- .../src/s3-transfer-manager/join-streams.ts | 12 ++++++------ 2 files changed, 9 insertions(+), 14 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index 94a99af356b4..f015d305506d 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -1,16 +1,11 @@ -import { - GetObjectCommandOutput, - 
ListBucketInventoryConfigurationsOutputFilterSensitiveLog, - PutObjectCommand, - S3, -} from "@aws-sdk/client-s3"; -import internal from "stream"; +import { GetObjectCommandOutput, S3 } from "@aws-sdk/client-s3"; +import { Readable } from "node:stream"; import { beforeAll, describe, expect, test as it } from "vitest"; import { getIntegTestResources } from "../../../../tests/e2e/get-integ-test-resources"; import { Upload } from "../Upload"; import { internalEventHandler, S3TransferManager } from "./S3TransferManager"; -import type { IS3TransferManager, S3TransferManagerConfig } from "./types"; +import type { S3TransferManagerConfig } from "./types"; describe(S3TransferManager.name, () => { const chunk = "01234567"; diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts index eea61a7210e8..7e578de6caf5 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts @@ -32,17 +32,17 @@ export async function joinStreams( * @internal */ export async function* iterateStreams( - streams: Promise[], + promises: Promise[], eventListeners?: JoinStreamIterationEvents ): AsyncIterable { let bytesTransferred = 0; let index = 0; - for (const streamPromise of streams) { - let stream: Awaited<(typeof streams)[0]>; + for (const streamPromise of promises) { + let stream: Awaited<(typeof promises)[0]>; try { stream = await streamPromise; } catch (e) { - await destroy(streams); + await destroy(promises); eventListeners?.onFailure?.(e, index); throw e; } @@ -82,9 +82,9 @@ export async function* iterateStreams( /** * @internal */ -async function destroy(streams: Promise[]): Promise { +async function destroy(promises: Promise[]): Promise { await Promise.all( - streams.map(async (streamPromise) => { + promises.map(async (streamPromise) => { return streamPromise .then((stream) => { if (stream instanceof Readable) { From 
d44f0cc30f2402de49722e86f2d957cbd3fb235d Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Tue, 29 Jul 2025 20:57:40 +0000 Subject: [PATCH 18/30] feat: promise.all temp fix for promise unhandled rejection issue --- .../src/s3-transfer-manager/S3TransferManager.e2e.spec.ts | 2 +- lib/lib-storage/src/s3-transfer-manager/join-streams.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index f015d305506d..fdc65ae23d29 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -166,7 +166,7 @@ describe(S3TransferManager.name, () => { const modes = ["PART", "RANGE"] as S3TransferManagerConfig["multipartDownloadType"][]; for (const mode of modes) { - it.only(`should fail when ETag changes during a ${mode} download`, async () => { + it(`should fail when ETag changes during a ${mode} download`, async () => { const totalSizeMB = 20 * 1024 * 1024; const Body = data(totalSizeMB); const Key = `${mode}-etag-test`; diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts index 7e578de6caf5..229c45822fe5 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts @@ -4,7 +4,6 @@ import { Readable } from "stream"; import { JoinStreamIterationEvents } from "./types"; -// TODO: check all types. 
needs to join nodejs and browser together /** * @internal */ @@ -24,6 +23,7 @@ export async function joinStreams( }); return sdkStreamMixin(newReadableStream); } else { + await Promise.all(streams); return sdkStreamMixin(Readable.from(iterateStreams(streams, eventListeners))); } } From 164fceef1e5c067c35e24cb3e218a979c8ad29c2 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Wed, 30 Jul 2025 01:31:22 +0000 Subject: [PATCH 19/30] chore: update join-streams browser to be consistent with join-streams node --- .../join-streams.browser.ts | 38 +++++++++++++++++-- .../src/s3-transfer-manager/join-streams.ts | 2 + 2 files changed, 36 insertions(+), 4 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts index bd9eb18f6dbe..923fcd174e04 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts @@ -3,6 +3,9 @@ import { isReadableStream, sdkStreamMixin } from "@smithy/util-stream"; import { JoinStreamIterationEvents } from "./types"; +/** + * @internal + */ export async function joinStreams( streams: Promise[], eventListeners?: JoinStreamIterationEvents @@ -23,16 +26,26 @@ export async function joinStreams( } } +/** + * @internal + */ export async function* iterateStreams( - streams: Promise[], + promises: Promise[], eventListeners?: JoinStreamIterationEvents ): AsyncIterable { let bytesTransferred = 0; let index = 0; - for (const streamPromise of streams) { - const stream = await streamPromise; + for (const streamPromise of promises) { + let stream: Awaited<(typeof promises)[0]>; + try { + stream = await streamPromise; + } catch (e) { + await destroy(promises); + eventListeners?.onFailure?.(e, index); + throw e; + } + if (isReadableStream(stream)) { - // TODO: May need to acquire reader before reaching the stream const reader = stream.getReader(); try { while (true) { @@ -56,3 +69,20 @@ 
export async function* iterateStreams( } eventListeners?.onCompletion?.(bytesTransferred, index - 1); } + +/** + * @internal + */ +async function destroy(promises: Promise[]): Promise { + await Promise.all( + promises.map(async (streamPromise) => { + return streamPromise + .then((stream) => { + if (isReadableStream(stream)) { + return stream.cancel(); + } + }) + .catch((e: unknown) => {}); + }) + ); +} diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts index 229c45822fe5..ff07765a80c9 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts @@ -23,6 +23,8 @@ export async function joinStreams( }); return sdkStreamMixin(newReadableStream); } else { + // TODO: The following line is a temp fix to handle error thrown in async iterable. + // We should find a better solution to improve performance. await Promise.all(streams); return sdkStreamMixin(Readable.from(iterateStreams(streams, eventListeners))); } From f367640927cb119c23a1fbe06e3b8fa8680365bf Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Wed, 30 Jul 2025 03:20:39 +0000 Subject: [PATCH 20/30] chore: code annotations S3TM functions --- .../S3TransferManager.e2e.spec.ts | 12 +- .../s3-transfer-manager/S3TransferManager.ts | 173 ++++++++++++++++-- .../event-listener-types.ts | 10 +- .../join-streams.browser.ts | 10 + .../src/s3-transfer-manager/join-streams.ts | 13 +- .../src/s3-transfer-manager/types.ts | 34 ++-- 6 files changed, 199 insertions(+), 53 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index 0185b5d46448..ea10539b81bb 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -253,8 +253,8 @@ describe(S3TransferManager.name, () => { } 
}); - describe("(SEP) download single object tests", () => { - async function sepTests( + describe("Required compliance download single object tests", () => { + async function complianceTests( objectType: "single" | "multipart", multipartType: "PART" | "RANGE", range: string | undefined, @@ -300,16 +300,16 @@ describe(S3TransferManager.name, () => { } it("single object: multipartDownloadType = PART, range = 0-12MB, partNumber = null", async () => { - await sepTests("single", "PART", `bytes=0-${12 * 1024 * 1024}`, undefined); + await complianceTests("single", "PART", `bytes=0-${12 * 1024 * 1024}`, undefined); }, 60_000); it("multipart object: multipartDownloadType = RANGE, range = 0-12MB, partNumber = null", async () => { - await sepTests("multipart", "RANGE", `bytes=0-${12 * 1024 * 1024}`, undefined); + await complianceTests("multipart", "RANGE", `bytes=0-${12 * 1024 * 1024}`, undefined); }, 60_000); it("single object: multipartDownloadType = PART, range = null, partNumber = null", async () => { - await sepTests("single", "PART", undefined, undefined); + await complianceTests("single", "PART", undefined, undefined); }, 60_000); it("single object: multipartDownloadType = RANGE, range = null, partNumber = null", async () => { - await sepTests("single", "RANGE", undefined, undefined); + await complianceTests("single", "RANGE", undefined, undefined); }, 60_000); }); }); diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index d6c7b5ca5fd1..7a8c42585638 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -23,11 +23,16 @@ import type { } from "./types"; /** - * Describe what this is - * TODO: Switch all @public to @alpha - * TODO: tag internal for itneral functions + * Client for efficient transfer of objects to and from Amazon S3. 
+ * Provides methods to optimize uploading and downloading individual objects + * as well as entire directories, with support for multipart operations, + * concurrency control, and request cancellation. + * Implements an eventTarget-based progress tracking system with methods to register, + * dispatch, and remove listeners for transfer lifecycle events. + * * @alpha */ + export class S3TransferManager implements IS3TransferManager { private static MIN_PART_SIZE = 5 * 1024 * 1024; // 5MB private static DEFAULT_PART_SIZE = 8 * 1024 * 1024; // 8MB @@ -90,6 +95,16 @@ export class S3TransferManager implements IS3TransferManager { options?: AddEventListenerOptions | boolean ): void; public addEventListener(type: string, callback: EventListener, options?: AddEventListenerOptions | boolean): void; + /** + * Registers a callback function to be executed when a specific transfer event occurs. + * Supports monitoring the full lifecycle of transfers. + * + * @param type - The type of event to listen for. + * @param callback - Function to execute when the specified event occurs. + * @param options - Optional configuration for event listener behavior. + * + * @alpha + */ public addEventListener(type: string, callback: EventListener, options?: AddEventListenerOptions | boolean): void { const eventType = type as keyof TransferEventListeners; const listeners = this.eventListeners[eventType]; @@ -129,18 +144,18 @@ export class S3TransferManager implements IS3TransferManager { listeners.push(updatedCallback); } + public dispatchEvent(event: Event & TransferEvent): boolean; + public dispatchEvent(event: Event & TransferCompleteEvent): boolean; + public dispatchEvent(event: Event): boolean; /** - * todo: what does the return boolean mean? + * Dispatches an event to the registered event listeners. + * Triggers callbacks registered via addEventListener with matching event types. 
* - * it returns false if the event is cancellable, and at least oneo the handlers which received event called - * Event.preventDefault(). Otherwise true. - * The use cases of preventDefault() does not apply to transfermanager but we should still keep the boolean - * and continue to return true to stay consistent with EventTarget. + * @param event - The event object to dispatch. + * @returns whether the event ran to completion * + * @alpha */ - public dispatchEvent(event: Event & TransferEvent): boolean; - public dispatchEvent(event: Event & TransferCompleteEvent): boolean; - public dispatchEvent(event: Event): boolean; public dispatchEvent(event: Event): boolean { const eventType = event.type; const listeners = this.eventListeners[eventType as keyof TransferEventListeners] as EventListener[]; @@ -182,6 +197,16 @@ export class S3TransferManager implements IS3TransferManager { callback: EventListener, options?: RemoveEventListenerOptions | boolean ): void; + /** + * Removes a previously registered event listener from the specified event type. + * Stops the callback from being invoked when the event occurs. + * + * @param type - The type of event to stop listening for. + * @param callback - The function that was previously registered. + * @param options - Optional configuration for the event listener. + * + * @alpha + */ public removeEventListener( type: string, callback: EventListener, @@ -209,10 +234,32 @@ export class S3TransferManager implements IS3TransferManager { } } + /** + * Uploads objects to S3 with automatic multipart upload handling. + * Automatically chooses between single object upload or multipart upload based on content length threshold. + * + * @param request - PutObjectCommandInput and CreateMultipartUploadCommandInput parameters for single or multipart uploads. + * @param transferOptions - Optional abort signal and event listeners for transfer lifecycle monitoring. 
+ * + * @returns S3 PutObject or CompleteMultipartUpload response with transfer event dispatching. + * + * @alpha + */ public upload(request: UploadRequest, transferOptions?: TransferOptions): Promise { throw new Error("Method not implemented."); } + /** + * Downloads single objects from S3 with automatic multipart handling. + * Automatically chooses between PART or RANGE download strategies and joins streams into a single response. + * + * @param request - GetObjectCommandInput parameters. PartNumber is not supported - use GetObjectCommand directly for specific parts. + * @param transferOptions - Optional abort signal and event listeners for transfer lifecycle monitoring. + * + * @returns S3 GetObject response with joined Body stream and transfer event dispatching. + * + * @alpha + */ public async download(request: DownloadRequest, transferOptions?: TransferOptions): Promise { const partNumber = request.PartNumber; if (typeof partNumber === "number") { @@ -248,12 +295,6 @@ export class S3TransferManager implements IS3TransferManager { } }; - // TODO: - // after completing SEP requirements: - // - acquire lock on webstreams in the same - // - synchronous frame as they are opened or else - // - the connection might be closed too early. - const response = { ...metadata, Body: await joinStreams(streams, { @@ -299,6 +340,16 @@ export class S3TransferManager implements IS3TransferManager { return response; } + /** + * Uploads all files in a directory recursively to an S3 bucket. + * Automatically maps local file paths to S3 object keys using prefix and delimiter configuration. + * + * @param options - Configuration including bucket, source directory, filtering, failure handling, and transfer settings. 
+ * + * @returns the number of objects that have been uploaded and the number of objects that have failed + * + * @alpha + */ public uploadAll(options: { bucket: string; source: string; @@ -314,6 +365,16 @@ export class S3TransferManager implements IS3TransferManager { throw new Error("Method not implemented."); } + /** + * Downloads all objects in a bucket to a local directory. + * Uses ListObjectsV2 to retrieve objects and automatically maps S3 object keys to local file paths. + * + * @param options - Configuration including bucket, destination directory, filtering, failure handling, and transfer settings. + * + * @returns The number of objects that have been downloaded and the number of objects that have failed + * + * @alpha + */ public downloadAll(options: { bucket: string; destination: string; @@ -328,6 +389,11 @@ export class S3TransferManager implements IS3TransferManager { throw new Error("Method not implemented."); } + /** + * Downloads object using part-based strategy with concurrent part requests. + * + * @internal + */ protected async downloadByPart( request: DownloadRequest, transferOptions: TransferOptions, @@ -444,6 +510,11 @@ export class S3TransferManager implements IS3TransferManager { }; } + /** + * Downloads object using range-based strategy with concurrent range requests. + * + * @internal + */ protected async downloadByRange( request: DownloadRequest, transferOptions: TransferOptions, @@ -589,6 +660,11 @@ export class S3TransferManager implements IS3TransferManager { }; } + /** + * Adds all event listeners from provided collection to the transfer manager. + * + * @internal + */ private addEventListeners(eventListeners?: TransferEventListeners): void { for (const listeners of this.iterateListeners(eventListeners)) { for (const listener of listeners) { @@ -597,6 +673,11 @@ export class S3TransferManager implements IS3TransferManager { } } + /** + * Removes event listeners from provided collection from the transfer manager. 
+ * + * @internal + */ private removeEventListeners(eventListeners?: TransferEventListeners): void { for (const listeners of this.iterateListeners(eventListeners)) { for (const listener of listeners) { @@ -605,6 +686,11 @@ export class S3TransferManager implements IS3TransferManager { } } + /** + * Copies all response properties except Body to the container object. + * + * @internal + */ private assignMetadata(container: any, response: any) { for (const key in response) { if (key === "Body") { @@ -614,6 +700,11 @@ export class S3TransferManager implements IS3TransferManager { } } + /** + * Updates response ContentLength and ContentRange based on total object size. + * + * @internal + */ private updateResponseLengthAndRange(response: DownloadResponse, totalSize: number | undefined): void { if (totalSize !== undefined) { response.ContentLength = totalSize; @@ -621,6 +712,11 @@ export class S3TransferManager implements IS3TransferManager { } } + /** + * Clears checksum values for composite multipart downloads. + * + * @internal + */ private updateChecksumValues(initialPart: DownloadResponse, metadata: Omit) { if (initialPart.ChecksumType === "COMPOSITE") { metadata.ChecksumCRC32 = undefined; @@ -630,6 +726,11 @@ export class S3TransferManager implements IS3TransferManager { } } + /** + * Processes response metadata by updating length, copying properties, and handling checksums. + * + * @internal + */ private processResponseMetadata( response: DownloadResponse, metadata: Omit, @@ -640,18 +741,33 @@ export class S3TransferManager implements IS3TransferManager { this.updateChecksumValues(response, metadata); } + /** + * Throws AbortError if transfer has been aborted via signal. + * + * @internal + */ private checkAborted(transferOptions?: TransferOptions): void { if (transferOptions?.abortSignal?.aborted) { throw Object.assign(new Error("Download aborted."), { name: "AbortError" }); } } + /** + * Validates configuration parameters meet minimum requirements. 
+ * + * @internal + */ private validateConfig(): void { if (this.targetPartSizeBytes < S3TransferManager.MIN_PART_SIZE) { throw new Error(`targetPartSizeBytes must be at least ${S3TransferManager.MIN_PART_SIZE} bytes`); } } + /** + * Dispatches transferInitiated event with initial progress snapshot. + * + * @internal + */ private dispatchTransferInitiatedEvent(request: DownloadRequest | UploadRequest, totalSize?: number): boolean { this.dispatchEvent( Object.assign(new Event("transferInitiated"), { @@ -665,6 +781,11 @@ export class S3TransferManager implements IS3TransferManager { return true; } + /** + * Dispatches transferFailed event with error details and progress snapshot. + * + * @internal + */ private dispatchTransferFailedEvent( request: DownloadRequest | UploadRequest, totalSize?: number, @@ -683,6 +804,11 @@ export class S3TransferManager implements IS3TransferManager { return true; } + /** + * Generator that yields event listeners from the provided collection for iteration. + * + * @internal + */ private *iterateListeners(eventListeners: TransferEventListeners = {}) { for (const key in eventListeners) { const eventType = key as keyof TransferEventListeners; @@ -700,6 +826,11 @@ export class S3TransferManager implements IS3TransferManager { } } + /** + * Validates part download ContentRange matches expected part boundaries. + * + * @internal + */ private validatePartDownload(contentRange: string | undefined, partNumber: number, partSize: number) { if (!contentRange) { throw new Error(`Missing ContentRange for part ${partNumber}.`); @@ -724,6 +855,11 @@ export class S3TransferManager implements IS3TransferManager { } } + /** + * Validates range download ContentRange matches requested byte range. 
+ * + * @internal + */ private validateRangeDownload(requestRange: string, responseRange: string | undefined) { if (!responseRange) { throw new Error(`Missing ContentRange for range ${requestRange}.`); @@ -757,8 +893,9 @@ export class S3TransferManager implements IS3TransferManager { throw new Error(`Expected range to end at ${expectedEnd} but got ${end}`); } } + /** - * + * Internal event handler for download lifecycle hooks. * * @internal */ diff --git a/lib/lib-storage/src/s3-transfer-manager/event-listener-types.ts b/lib/lib-storage/src/s3-transfer-manager/event-listener-types.ts index 1c2bc188c5a4..13c8774b1736 100644 --- a/lib/lib-storage/src/s3-transfer-manager/event-listener-types.ts +++ b/lib/lib-storage/src/s3-transfer-manager/event-listener-types.ts @@ -4,7 +4,7 @@ * * @param event - The event object containing transfer details and progress information. - * @public + * @alpha */ export type EventListenerFunction = (event: Event & E) => void; @@ -12,7 +12,7 @@ export type EventListenerFunction = (event: Event & E) => void; * Union type for handling transfer events in the transfer manager. * Can be a function or an object. * - * @public + * @alpha */ export type EventListener = EventListenerFunction | EventListenerObject; @@ -20,7 +20,7 @@ export type EventListener = EventListenerFunction | EventListenerO * Object type for handling transfer events in the transfer manager. * Represents an object that implements the `handleEvent` method to handle transfer events. * - * @public + * @alpha */ export type EventListenerObject = { handleEvent: EventListenerFunction; @@ -30,7 +30,7 @@ export type EventListenerObject = { * Configuration options for registering event listeners in the transfer manager. * Controls the behavior of event listeners for transfer events. 
* - * @public + * @alpha */ export type AddEventListenerOptions = { /** @@ -51,7 +51,7 @@ export type AddEventListenerOptions = { * Configuration options for removing event listeners in the transfer manager. * Controls the behavior of event listeners for transfer events. * - * @public + * @alpha */ export type RemoveEventListenerOptions = { capture?: boolean; diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts index bd9eb18f6dbe..c428e950859f 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.browser.ts @@ -3,6 +3,11 @@ import { isReadableStream, sdkStreamMixin } from "@smithy/util-stream"; import { JoinStreamIterationEvents } from "./types"; +/** + * Joins multiple stream promises into a single stream with event callbacks. + * + * @internal + */ export async function joinStreams( streams: Promise[], eventListeners?: JoinStreamIterationEvents @@ -23,6 +28,11 @@ export async function joinStreams( } } +/** + * Iterates through stream promises sequentially, yielding chunks with progress tracking. + * + * @internal + */ export async function* iterateStreams( streams: Promise[], eventListeners?: JoinStreamIterationEvents diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts index 7aa50ed689fa..9d8509bf9ff9 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts @@ -4,7 +4,11 @@ import { Readable } from "stream"; import { JoinStreamIterationEvents } from "./types"; -// TODO: check all types. needs to join nodejs and browser together +/** + * Joins multiple stream promises into a single stream with event callbacks. 
+ * + * @internal + */ export async function joinStreams( streams: Promise[], eventListeners?: JoinStreamIterationEvents @@ -26,14 +30,10 @@ export async function joinStreams( } /** - * + * Iterates through stream promises sequentially, yielding chunks with progress tracking. * * @internal */ -export const internalEventHandler = { - async onStreamAvailable() {}, -}; - export async function* iterateStreams( streams: Promise[], eventListeners?: JoinStreamIterationEvents @@ -42,7 +42,6 @@ export async function* iterateStreams( let index = 0; for (const streamPromise of streams) { const stream = await streamPromise; - await internalEventHandler.onStreamAvailable(); if (isReadableStream(stream)) { // TODO: May need to acquire reader before reaching the stream const reader = stream.getReader(); diff --git a/lib/lib-storage/src/s3-transfer-manager/types.ts b/lib/lib-storage/src/s3-transfer-manager/types.ts index 69801f475970..3312a3ee9244 100644 --- a/lib/lib-storage/src/s3-transfer-manager/types.ts +++ b/lib/lib-storage/src/s3-transfer-manager/types.ts @@ -16,7 +16,7 @@ import { AddEventListenerOptions, EventListener, RemoveEventListenerOptions } fr /** * Constructor parameters for the S3 Transfer Manager configuration. * - * @public + * @alpha */ export interface S3TransferManagerConfig { /** @@ -54,7 +54,7 @@ export interface S3TransferManagerConfig { * both PutObjectCommandInput and CreateMultipartUploadCommandInput to support both single object * and multipart upload requests. * - * @public + * @alpha */ export type UploadRequest = PutObjectCommandInput & CreateMultipartUploadCommandInput; @@ -62,7 +62,7 @@ export type UploadRequest = PutObjectCommandInput & CreateMultipartUploadCommand * Uses union because the responses can vary from single object upload response to multipart upload * response depending on the request. 
* - * @public + * @alpha */ export type UploadResponse = PutObjectCommandOutput | CompleteMultipartUploadCommandOutput; @@ -70,7 +70,7 @@ export type UploadResponse = PutObjectCommandOutput | CompleteMultipartUploadCom * Features the same properties as SDK JS S3 Command GetObjectCommandInput. * Created to standardize naming convention for TM APIs. * - * @public + * @alpha */ export type DownloadRequest = GetObjectCommandInput; @@ -78,7 +78,7 @@ export type DownloadRequest = GetObjectCommandInput; * Features the same properties as SDK JS S3 Command GetObjectCommandOutput. * Created to standardize naming convention for TM APIs. * - * @public + * @alpha */ export type DownloadResponse = GetObjectCommandOutput; @@ -87,7 +87,7 @@ export type DownloadResponse = GetObjectCommandOutput; * * @property eventListeners - Collection of callbacks for monitoring transfer lifecycle events * - * @public + * @alpha */ export type TransferOptions = HttpHandlerOptions & { eventListeners?: TransferEventListeners }; @@ -99,7 +99,7 @@ export type TransferOptions = HttpHandlerOptions & { eventListeners?: TransferEv * Implements an event-based progress tracking system with methods to register, * dispatch, and remove listeners for transfer lifecycle events. * - * @public + * @alpha */ export interface IS3TransferManager { /** @@ -194,7 +194,7 @@ export interface IS3TransferManager { * @param callback - Function to execute when the specified event occurs. * @param options - Optional configuration for event listener behavior. * - * @public + * @alpha */ addEventListener( type: "transferInitiated", @@ -225,7 +225,7 @@ export interface IS3TransferManager { * @param event - The event object to dispatch. 
* @returns whether the event ran to completion * - * @public + * @alpha */ dispatchEvent(event: Event & TransferEvent): boolean; dispatchEvent(event: Event & TransferCompleteEvent): boolean; @@ -239,7 +239,7 @@ export interface IS3TransferManager { * @param callback - The function that was previously registered. * @param options - Optional configuration for the event listener. * - * @public + * @alpha */ removeEventListener( type: "transferInitiated", @@ -267,7 +267,7 @@ export interface IS3TransferManager { /** * Provides a snapshot of the progress during a single object transfer. * - * @public + * @alpha */ export interface SingleObjectProgressSnapshot { transferredBytes: number; @@ -278,7 +278,7 @@ export interface SingleObjectProgressSnapshot { /** * Provides a snapshot of the progress during a directory transfer. * - * @public + * @alpha */ export interface DirectoryProgressSnapshot { transferredBytes: number; @@ -290,7 +290,7 @@ export interface DirectoryProgressSnapshot { /** * Progress snapshot for either single object transfers or directory transfers. * - * @public + * @alpha */ export type TransferProgressSnapshot = SingleObjectProgressSnapshot | DirectoryProgressSnapshot; @@ -298,7 +298,7 @@ export type TransferProgressSnapshot = SingleObjectProgressSnapshot | DirectoryP * Event interface for transfer progress events. * Used for tracking ongoing transfers with the original request and progress snapshot. * - * @public + * @alpha */ export interface TransferEvent extends Event { request: UploadRequest | DownloadRequest; @@ -309,7 +309,7 @@ export interface TransferEvent extends Event { * Event interface for transfer completion. * Extends TransferEvent with response data that is received after a completed transfer. 
* - * @public + * @alpha */ export interface TransferCompleteEvent extends TransferEvent { response: UploadResponse | DownloadResponse; @@ -319,7 +319,7 @@ export interface TransferCompleteEvent extends TransferEvent { * Collection of event handlers to monitor transfer lifecycle events. * Allows a way to register callbacks for each stage of the transfer process. * - * @public + * @alpha */ export interface TransferEventListeners { transferInitiated?: EventListener[]; @@ -331,7 +331,7 @@ export interface TransferEventListeners { /** * Event listener type. * - * @public + * @alpha */ export interface JoinStreamIterationEvents { onBytes?: (byteLength: number, index: number) => void; From 4450b166ceed54ad39a86a92f507f6df57781441 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Wed, 30 Jul 2025 05:16:16 +0000 Subject: [PATCH 21/30] chore: readme updates for s3transfermanager --- .../src/s3-transfer-manager/README.md | 290 +++++++++++++----- 1 file changed, 207 insertions(+), 83 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/README.md b/lib/lib-storage/src/s3-transfer-manager/README.md index c918df0e53b5..86144f4907a8 100644 --- a/lib/lib-storage/src/s3-transfer-manager/README.md +++ b/lib/lib-storage/src/s3-transfer-manager/README.md @@ -1,74 +1,102 @@ # @aws-sdk/lib-storage/s3-transfer-manager +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/lib-storage/latest.svg)](https://www.npmjs.com/package/@aws-sdk/lib-storage) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/lib-storage.svg)](https://www.npmjs.com/package/@aws-sdk/lib-storage) + ## Overview -S3TransferManager is a high level library that helps customer interact with S3 +S3TransferManager is a high level library that helps customers interact with S3 for their most common use cases that involve multiple API operations through SDK JS V3. 
S3TransferManager provides the following features: -- automatic multipart upload to S3 -- automatic multipart download from S3 -- upload all files in a directory to an S3 bucket recursively or non-recursively -- download all objects in a bucket to a local directory recursively or non-recursively -- transfer progress listener +- automatic [multipart upload](#upload) to S3 +- automatic [multipart download](#download) from S3 +- upload all files in a directory to an S3 bucket recursively or non-recursively (see [upload all](#uploadall)) +- download all objects in a bucket to a local directory recursively or non-recursively (see [download all](#downloadall)) +- transfer progress listener (see [Event Listeners](#event-listeners)) ## Installation +`npm install @aws-sdk/lib-storage` + ## Getting Started -## Configuration +### Import -When creating an instance of the S3TransferManager, users can configure some of it's client options -to best fit their use case. - -- s3ClientInstance - specify the low level S3 client that will be used to send reqeusts to S3 -- targetPartSizeBytes - specify the target part size to use in mulitpart transfer. Does not - apply to the last part and downloads if multipartDownloadType is PART -- multipartUploadThresholdBytes - specify the size threshold in bytes for multipart upload. -- checksumValidationEnabled - option to disable checksum validation for donwload. -- multipartDownloadType - specify how the SDK should perform multipart download. Either RANGE or PART. -- eventListeners - transfer progress listeners to receive event-driven updates on transfer - progress throughout the lifecycle of a request at client level. Supported callbacks: - - transferInitiated: A new transfer has been initiated. This method is invoked exactly once per - transfer, right after the operation has started. It allows users to retrieve the request and ProgressSnapshot. - - bytesTransferred: Additional bytes have been submitted or received. 
This method may be called - many times per transfer, depending on the transfer size and I/O buffer sizes. It must be called - at least once for a successful transfer. It allows users to retrieve the the request and the ProgressSnapshot. - - transferComplete: The transfer has completed successfully. This method is called exactly once for - a successful transfer. It allows users to retrieve the request, the response and the ProgressSnapshot. - - transferFailed: The transfer has failed. This method is called exactly once for a failed transfer. - It allows users to retrieve the request and a progress snapshot. - -### Example +To begin using `S3TransferManager`, you must import it through `@aws-sdk/lib-storage`. You can also specify your own `S3Client` to use with `S3TransferManager`. Example: ```js import { S3Client } from "@aws-sdk/client-s3"; import { S3TransferManager } from "@aws-sdk/lib-storage"; +``` - const tm = new S3TransferManager ({ - s3ClientInstance: new S3Client({}), - multipartDownloadType: "RANGE", - targetPartSizeBytes: 8 * 1024 * 1024 - multipartThresholdBytes: 16 * 1024 * 1024, - checksumValidationEnabled: true, - checksumAlgorithm: CRC32, - multipartDownloadType: PART, - eventListeners: { - transferInitiated: [transferStarted], - bytesTrnasferred: [progressBar], - transferComplete: [{ - handleEvent: console.log({ - request, snapshot, response - }) - }], - trasnferFailed: [transferFailed] - } - }) +### Creating a TransferManager Instance + +When creating an instance, takes an optional `S3TransferManagerConfig` object (see [Constructor Options](#constructor-options)). 
Minimal instantiation of a `S3TransferManager`: + +```js +// Create S3 client +const s3Client = new S3Client({ region: "us-east-1" }); + +// Create transfer manager +const tm = new S3TransferManager({ + s3ClientInstance: s3Client, +}); +``` + +### Basic Usage + +Basic use of `download()` (await required): + +```js +const download = await tm.download({ + Bucket, + Key, +}); + +const data = await download.Body?.transformToByteArray(); +console.log(`Downloaded ${data.byteLength} bytes`); ``` +## Configuration + +- **s3ClientInstance** - The S3 client instance to use for requests +- **targetPartSizeBytes** - Target part size for multipart transfers (default: 8MB) +- **multipartUploadThresholdBytes** - Size threshold for multipart upload (default: 16MB) +- **checksumValidationEnabled** - Enable/disable checksum validation for downloads (default: true) +- **multipartDownloadType** - Download strategy: "RANGE" or "PART" (default: "RANGE") +- [**eventListeners**](#event-listeners) - Transfer progress listeners + ### Constructor Options -## API Reference +The S3TransferManager constructor accepts an optional `S3TransferManagerConfig` object with the following optional properties: + +| Option | Type | Default | Description | +| ------------------------------- | ------------------------ | ------------------------------------- | ------------------------------------------------- | +| `s3ClientInstance` | `S3Client` | `new S3Client()` with checksum config | S3 client instance for API calls | +| `targetPartSizeBytes` | `number` | `8388608` (8MB) | Target size for each part in multipart operations | +| `multipartUploadThresholdBytes` | `number` | `16777216` (16MB) | File size threshold to trigger multipart upload | +| `checksumValidationEnabled` | `boolean` | `true` | Enable checksum validation for data integrity | +| `checksumAlgorithm` | `ChecksumAlgorithm` | `"CRC32"` | Algorithm used for checksum calculation | +| `multipartDownloadType` | `"PART" \| "RANGE"` | `"PART"` | 
Strategy for multipart downloads | +| `eventListeners` | `TransferEventListeners` | `{}` | Event listeners for transfer progress | + +**Example:** + +```js +const transferManager = new S3TransferManager({ + s3ClientInstance: new S3Client({ region: "us-west-2" }), + targetPartSizeBytes: 10 * 1024 * 1024, // 10MB + multipartUploadThresholdBytes: 20 * 1024 * 1024, // 20MB + checksumValidationEnabled: false, + checksumAlgorithm: "SHA256", + multipartDownloadType: "RANGE", + eventListeners: { + transferInitiated: [myInitiatedHandler], + bytesTransferred: [myProgressHandler], + }, +}); +``` ## Methods @@ -76,61 +104,157 @@ import { S3TransferManager } from "@aws-sdk/lib-storage"; ### download() -The download() function in S3TransferManager is a wrapper function for the S3 GetObjectCommand -allowing users to download objects from an S3 bucket using multipart download of two types -which are specified in the configuration of the S3TransferManager instance: Part GET and Ranged GET. -Both of which download the object using GetObjectCommand in separate streams then join them into -one single stream. The S3TransferManager download() supports Readable and ReadableStream for node and browser. +Downloads objects from S3 using multipart download with two modes: + +**PART Mode:** + +- Optimized for objects uploaded via multipart upload +- Uses S3's native PartNumber parameter to download parts concurrently + +**RANGE Mode:** + +- Works with any S3 object regardless of upload method +- Uses HTTP Range headers to split objects into chunks for concurrent download + +Both modes join separate streams into a single stream and support Readable/ReadableStream for Node.js and browsers. 
+ +**Parameters:** + +- `Bucket` (required) - S3 bucket name +- `Key` (required) - Object key/path +- `Range` - Byte range for partial downloads (e.g., "bytes=0-1023") + +**Transfer Options:** -- Part GET - - Use case: Optimizes downloads for objects that were uploaded using the S3 multipart upload - - How it works: Uses the S3 native download feature with the PartNumber parameter. It fetches part 1 of the object to get the metadata then downloads the remaining parts concurrently. -- Range GET - - Use case: Allows for multipart download for any S3 object regardless of whether it was - uploaded using multipart upload or not - - How it works: Uses the HTTP Range request with the bytes=start-end headers to split objects into - chunks based on the user-provided byte range header, or if not included the MIN_PART_SIZE to make concurrent range requests. +- `abortSignal` - AbortController signal for cancellation +- `eventListeners` - Progress tracking callbacks -Users can also include an abortController allowing for cancellation mid download along -with eventListeners for the callbacks: 'transferInitiated', 'bytesTransferred', 'transferComplete', -and 'transferFailed' at client level and request level. 'bytesTransferred' provides progress updates per byte chunk during streaming. +> For complete parameter list, see [GetObjectCommandInput](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/client/s3/command/GetObjectCommand/) documentation. -#### Validation +**Features:** -Both multipartDownloadTypes have methods that validates the bytes and ranges of the multipart download requests. In multipartDownloadType PART, bytes of the part boundaries in each concurrent request are checked for whether they match the expected byte boundaries. In multipartDownloadType RANGE, the byte ranges are checked for whether they match the expected ranges. An error is thrown on mismatches and all requests for download is cancelled. 
+- AbortController support for cancellation +- Event listeners: `transferInitiated`, `bytesTransferred`, `transferComplete`, `transferFailed` +- ETag validation ensures object consistency during download +- Automatic boundary/range validation with error handling -Both both PART and RANGE GET uses the S3 standard IfMatch header with the initial ETag for subsequent parts to ensure object version consistency during a download. +**Validation:** + +Both modes validate data integrity: + +- **PART**: Validates part boundaries match expected byte ranges +- **RANGE**: Validates byte ranges match expected values +- Uses `IfMatch` header with initial ETag to ensure object consistency +- Throws errors and cancels download on validation failures + +We do not recommend updating the object you're downloading mid-download as this may throw a [Precondition Failed error](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/http-412-precondition-failed.html). #### uploadAll() #### downloadAll() -### Event Handling +## Event Handling -#### addEventListener() +### addEventListener() -#### removeEventListener() +Registers event listeners for transfer lifecycle monitoring. It uses familiar EventTarget API patterns. -#### dispatchEvent() +**Event Types:** -## Transfer Options +- `transferInitiated` - Fired when transfer begins +- `bytesTransferred` - Fired during transfer progress with each byte chunk transfer +- `transferComplete` - Fired when transfer succeeds +- `transferFailed` - Fired when transfer fails -### AbortSignal +**Parameters:** -### Event Listeners +- `type` - Event type to listen for +- `callback` - Function or object with `handleEvent` method +- `options` - Optional configuration: + - `once: boolean` - Remove listener after first execution + - `signal: AbortSignal` - Auto-remove listener when signal aborts + +**Example:** + +```js +function progressBar({ request, snapshot }) { + const percent = snapshot.totalBytes ? 
(snapshot.transferredBytes / snapshot.totalBytes) * 100 : 0; + let barLength = percent / 2; + let progressBar = "["; + for (let i = 0; i < 50; i++) { + if (barLength > 0) { + progressBar += "#"; + barLength--; + } else { + progressBar += "-"; + } + } + progressBar += "]"; + + process.stdout.clearLine(0); + process.stdout.cursorTo(0); + process.stdout.write(`Downloading... ${progressBar} ${percent.toFixed(0)}%`); +} + +transferManager.addEventListener("bytesTransferred", progressBar); + +// One-time listener +transferManager.addEventListener( + "transferComplete", + (event) => { + console.log(`\nTransfer completed: ${event.request.Key}`); + }, + { once: true } +); +``` + +### removeEventListener() -## Examples +Removes a previously registered event listener from the specified event type. -### Basic Upload +**Parameters:** -### Basic Download +- `type` - Event type to stop listening for +- `callback` - The exact function that was previously registered +- `options` - Optional configuration (currently unused) -### Multipart Download +**Example:** + +```js +const progressHandler = (event) => console.log("Progress:", event.snapshot); + +transferManager.addEventListener("bytesTransferred", progressHandler); +transferManager.removeEventListener("bytesTransferred", progressHandler); +``` -### Event Handling +### dispatchEvent() -### Abort Operations +Dispatches events to registered listeners. Primarily used internally but available for custom event handling. 
+ +**Parameters:** + +- `event` - Event object with `type` property matching registered listeners + +**Returns:** + +- `boolean` - Always returns `true` (follows EventTarget API) + +**Example:** + +```js +const customEvent = new Event("transferInitiated"); +customEvent.snapshot = { transferredBytes: 0, totalBytes: 1000 }; +transferManager.dispatchEvent(customEvent); +``` + +## Transfer Options + +### AbortSignal + +TODO: Include practical examples of using abortcontroller to cancel downloads + +### Event Listeners -## Performance Considerations +TODO: Include examples of eventListeners are client level and request level -## Error Handling +## Performance From 927c394daff4dbad2b393830a1ead22d831eecf1 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Wed, 30 Jul 2025 14:26:09 +0000 Subject: [PATCH 22/30] chore: removed dependencies and unused internal event handler --- .../src/s3-transfer-manager/join-streams.ts | 10 ---------- yarn.lock | 12 ------------ 2 files changed, 22 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts index 7aa50ed689fa..d0a796b920c4 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts @@ -25,15 +25,6 @@ export async function joinStreams( } } -/** - * - * - * @internal - */ -export const internalEventHandler = { - async onStreamAvailable() {}, -}; - export async function* iterateStreams( streams: Promise[], eventListeners?: JoinStreamIterationEvents @@ -42,7 +33,6 @@ export async function* iterateStreams( let index = 0; for (const streamPromise of streams) { const stream = await streamPromise; - await internalEventHandler.onStreamAvailable(); if (isReadableStream(stream)) { // TODO: May need to acquire reader before reaching the stream const reader = stream.getReader(); diff --git a/yarn.lock b/yarn.lock index 6803bc684f8e..63f36fc64d57 100644 --- a/yarn.lock +++ b/yarn.lock @@ 
-31118,17 +31118,6 @@ __metadata: languageName: node linkType: hard -"aws-sdk-client-mock@npm:^4.1.0": - version: 4.1.0 - resolution: "aws-sdk-client-mock@npm:4.1.0" - dependencies: - "@types/sinon": "npm:^17.0.3" - sinon: "npm:^18.0.1" - tslib: "npm:^2.1.0" - checksum: 10c0/045caad0cff0ffeb08e69849dcae51aac8999163c58d71220bf47a82c237aabab2abf92bf6bf3bd7666e6e8984513c628e01a89eafa46fb230201d6587bc01e9 - languageName: node - linkType: hard - "aws-sdk-js-v3@workspace:.": version: 0.0.0-use.local resolution: "aws-sdk-js-v3@workspace:." @@ -31150,7 +31139,6 @@ __metadata: "@typescript-eslint/eslint-plugin": "npm:5.55.0" "@typescript-eslint/parser": "npm:5.55.0" async: "npm:3.2.4" - aws-sdk-client-mock: "npm:^4.1.0" concurrently: "npm:7.0.0" decomment: "npm:0.9.5" downlevel-dts: "npm:0.10.1" From 41ee715f3a0e2a7145fe0d93cf045be90145af35 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Wed, 30 Jul 2025 14:29:47 +0000 Subject: [PATCH 23/30] chore: import cleanup and removed dependencies --- .../S3TransferManager.e2e.spec.ts | 1 - .../S3TransferManager.spec.ts | 70 +------------------ .../s3-transfer-manager/S3TransferManager.ts | 2 +- package.json | 1 - yarn.lock | 12 ---- 5 files changed, 2 insertions(+), 84 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index fdc65ae23d29..cb4643329574 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -1,5 +1,4 @@ import { GetObjectCommandOutput, S3 } from "@aws-sdk/client-s3"; -import { Readable } from "node:stream"; import { beforeAll, describe, expect, test as it } from "vitest"; import { getIntegTestResources } from "../../../../tests/e2e/get-integ-test-resources"; diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts 
b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts index 07fcbc44d5d1..051e8dfc4bed 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.spec.ts @@ -1,13 +1,11 @@ import { S3, S3Client } from "@aws-sdk/client-s3"; -import { GetObjectCommand, PutObjectCommand } from "@aws-sdk/client-s3"; import { TransferCompleteEvent, TransferEvent } from "@aws-sdk/lib-storage/dist-types/s3-transfer-manager/types"; import { StreamingBlobPayloadOutputTypes } from "@smithy/types"; -import { mockClient } from "aws-sdk-client-mock"; import { Readable } from "stream"; import { beforeAll, beforeEach, describe, expect, test as it, vi } from "vitest"; import { getIntegTestResources } from "../../../../tests/e2e/get-integ-test-resources"; -import { iterateStreams, joinStreams } from "./join-streams"; +import { joinStreams } from "./join-streams"; import { S3TransferManager } from "./S3TransferManager"; describe("S3TransferManager Unit Tests", () => { @@ -29,72 +27,6 @@ describe("S3TransferManager Unit Tests", () => { }); }); - // TODO: This test uses mock from public library aws-sdk-mock. 
May remove - describe("ETag Unit tests", () => { - const s3Mock = mockClient(S3Client); - - beforeEach(() => { - s3Mock.reset(); - }); - - it("Should throw precondition error when ETag changes mid-download", async () => { - const bucket = "test-bucket"; - const key = "test-key"; - const originalData = Buffer.alloc(20 * 1024 * 1024, "a"); // 20MB - - let getCallCount = 0; - - s3Mock.on(GetObjectCommand).callsFake((input) => { - getCallCount++; - - if (getCallCount === 1) { - // First call - return original object with PartsCount > 1 to trigger concurrent requests - return { - Body: Readable.from([originalData.slice(0, 8 * 1024 * 1024)]), - ETag: '"original-etag"', - ContentLength: 8 * 1024 * 1024, - ContentRange: "bytes 0-8388607/20971520", // Part 1 of 3 parts - PartsCount: 3, - }; - } else { - // Subsequent calls with IfMatch should fail with 412 Precondition Failed - if (input.IfMatch === '"original-etag"') { - const error = new Error("The condition specified using HTTP conditional header(s) is not met."); - error.name = "PreconditionFailed"; - (error as any).$metadata = { - httpStatusCode: 412, - }; - throw error; - } - - // Fallback for any other calls - return { - Body: Readable.from([originalData.slice(0, 8 * 1024 * 1024)]), - ETag: '"original-etag"', - ContentLength: 8 * 1024 * 1024, - }; - } - }); - - const tm = new S3TransferManager({ - s3ClientInstance: new S3Client({}), - targetPartSizeBytes: 8 * 1024 * 1024, - multipartDownloadType: "PART", // Use PART mode to trigger the concurrent requests - }); - - await expect( - tm.download({ - Bucket: bucket, - Key: key, - }) - ).rejects.toThrowError( - expect.objectContaining({ - name: "PreconditionFailed", - }) - ); - }); - }); - describe("S3TransferManager Constructor", () => { it("Should create an instance of S3TransferManager with defaults given no parameters", () => { const tm = new S3TransferManager() as any; diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts 
b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index dc8fb3ffbe40..3dc25ea41f44 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -5,7 +5,7 @@ import type { PutObjectCommandInput, } from "@aws-sdk/client-s3"; import { GetObjectCommand, HeadObjectCommand, S3Client } from "@aws-sdk/client-s3"; -import { type StreamingBlobPayloadOutputTypes, Checksum, ChecksumConstructor } from "@smithy/types"; +import { type StreamingBlobPayloadOutputTypes } from "@smithy/types"; import type { AddEventListenerOptions, EventListener, RemoveEventListenerOptions } from "./event-listener-types"; import { joinStreams } from "./join-streams"; diff --git a/package.json b/package.json index 755f6550d735..2da8be494643 100644 --- a/package.json +++ b/package.json @@ -79,7 +79,6 @@ "@typescript-eslint/eslint-plugin": "5.55.0", "@typescript-eslint/parser": "5.55.0", "async": "3.2.4", - "aws-sdk-client-mock": "^4.1.0", "concurrently": "7.0.0", "decomment": "0.9.5", "downlevel-dts": "0.10.1", diff --git a/yarn.lock b/yarn.lock index 318d03fd3ceb..9b1705e6eaf2 100644 --- a/yarn.lock +++ b/yarn.lock @@ -31162,17 +31162,6 @@ __metadata: languageName: node linkType: hard -"aws-sdk-client-mock@npm:^4.1.0": - version: 4.1.0 - resolution: "aws-sdk-client-mock@npm:4.1.0" - dependencies: - "@types/sinon": "npm:^17.0.3" - sinon: "npm:^18.0.1" - tslib: "npm:^2.1.0" - checksum: 10c0/045caad0cff0ffeb08e69849dcae51aac8999163c58d71220bf47a82c237aabab2abf92bf6bf3bd7666e6e8984513c628e01a89eafa46fb230201d6587bc01e9 - languageName: node - linkType: hard - "aws-sdk-js-v3@workspace:.": version: 0.0.0-use.local resolution: "aws-sdk-js-v3@workspace:." 
@@ -31194,7 +31183,6 @@ __metadata: "@typescript-eslint/eslint-plugin": "npm:5.55.0" "@typescript-eslint/parser": "npm:5.55.0" async: "npm:3.2.4" - aws-sdk-client-mock: "npm:^4.1.0" concurrently: "npm:7.0.0" decomment: "npm:0.9.5" downlevel-dts: "npm:0.10.1" From 82cc0b5ecfb1a1c64364c3e795aa69c741a8b4a8 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Wed, 30 Jul 2025 15:33:28 +0000 Subject: [PATCH 24/30] chore: readme updates --- .../src/s3-transfer-manager/README.md | 238 ++++++++++++++++-- 1 file changed, 221 insertions(+), 17 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/README.md b/lib/lib-storage/src/s3-transfer-manager/README.md index 86144f4907a8..ed5829c879df 100644 --- a/lib/lib-storage/src/s3-transfer-manager/README.md +++ b/lib/lib-storage/src/s3-transfer-manager/README.md @@ -84,7 +84,7 @@ The S3TransferManager constructor accepts an optional `S3TransferManagerConfig` **Example:** ```js -const transferManager = new S3TransferManager({ +const tm = new S3TransferManager({ s3ClientInstance: new S3Client({ region: "us-west-2" }), targetPartSizeBytes: 10 * 1024 * 1024, // 10MB multipartUploadThresholdBytes: 20 * 1024 * 1024, // 20MB @@ -102,6 +102,10 @@ const transferManager = new S3TransferManager({ ### upload() +> 🚧 **Under Development** +> +> Documentation will be available when this feature is implemented. + ### download() Downloads objects from S3 using multipart download with two modes: @@ -151,20 +155,71 @@ We do not recommend updating the object you're downloading mid-download as this #### uploadAll() +> 🚧 **Under Development** +> +> Documentation will be available when this feature is implemented. + #### downloadAll() +> 🚧 **Under Development** +> +> Documentation will be available when this feature is implemented. + ## Event Handling -### addEventListener() +**Event Types and Data:** -Registers event listeners for transfer lifecycle monitoring. It uses familiar EventTarget API patterns.
+Event listeners receive a single event object with the following properties: + +- **`transferInitiated`** - Fired once when transfer begins + + - `request` - Original transfer request (Bucket, Key, etc.) + - `snapshot` - Initial progress state (`transferredBytes: 0`, `totalBytes` if known) + +- **`bytesTransferred`** - Fired during transfer progress with each chunk + + - `request` - Original transfer request + - `snapshot` - Current progress (`transferredBytes`, `totalBytes`, `transferredFiles` for directory transfers) + +- **`transferComplete`** - Fired once when transfer succeeds + + - `request` - Original transfer request + - `snapshot` - Final progress state + - `response` - Complete S3 response with metadata + +- **`transferFailed`** - Fired once when transfer fails + - `request` - Original transfer request + - `snapshot` - Progress state at time of failure + +**Creating Callback Functions:** + +Event callbacks receive a single event object. Use destructuring to access specific properties: + +```js +// Basic function - access specific properties +function transferComplete({ request, snapshot, response }) { + console.log(`Transfer completed: ${request.Key}`); + console.log(`Total bytes: ${snapshot.transferredBytes}`); + console.log(`Response status: ${response.$metadata?.httpStatusCode}`); +} -**Event Types:** +// Arrow function - inline usage +const progressHandler = ({ snapshot }) => { + const percent = snapshot.totalBytes ? 
(snapshot.transferredBytes / snapshot.totalBytes) * 100 : 0; + console.log(`Progress: ${percent.toFixed(1)}%`); +}; -- `transferInitiated` - Fired when transfer begins -- `bytesTransferred` - Fired during transfer progress with each byte chunk transfer -- `transferComplete` - Fired when transfer succeeds -- `transferFailed` - Fired when transfer fails +// Object with handleEvent method +const transferLogger = { + handleEvent: ({ request, snapshot }) => { + console.log(`${request.Key}: ${snapshot.transferredBytes} bytes transferred`); + }, +}; +``` + +### addEventListener() + +Registers event listeners for transfer lifecycle monitoring. It uses familiar EventTarget API patterns. **Parameters:** @@ -196,10 +251,10 @@ function progressBar({ request, snapshot }) { process.stdout.write(`Downloading... ${progressBar} ${percent.toFixed(0)}%`); } -transferManager.addEventListener("bytesTransferred", progressBar); +tm.addEventListener("bytesTransferred", progressBar); // One-time listener -transferManager.addEventListener( +tm.addEventListener( "transferComplete", (event) => { console.log(`\nTransfer completed: ${event.request.Key}`); @@ -210,21 +265,28 @@ transferManager.addEventListener( ### removeEventListener() -Removes a previously registered event listener from the specified event type. +Removes a previously registered event listener from the specified event type. You must pass the exact same function reference that was used when adding the listener. + +**Important:** If you plan to remove event listeners during transfer lifecycle, define your callback as a named function or variable - you cannot remove anonymous functions. 
**Parameters:** - `type` - Event type to stop listening for -- `callback` - The exact function that was previously registered +- `callback` - The exact function reference that was previously registered - `options` - Optional configuration (currently unused) **Example:** ```js +// Can be removed const progressHandler = (event) => console.log("Progress:", event.snapshot); -transferManager.addEventListener("bytesTransferred", progressHandler); -transferManager.removeEventListener("bytesTransferred", progressHandler); +tm.addEventListener("bytesTransferred", progressHandler); +tm.removeEventListener("bytesTransferred", progressHandler); // Works + +// Cannot be removed +tm.addEventListener("bytesTransferred", (event) => console.log("Progress:", event.snapshot)); +tm.removeEventListener("bytesTransferred", (event) => console.log("Progress:", event.snapshot)); // Won't work - different function reference ``` ### dispatchEvent() @@ -251,10 +313,152 @@ transferManager.dispatchEvent(customEvent); ### AbortSignal -TODO: Include practical examples of using abortcontroller to cancel downloads +Use the standard AbortController (included in AWS SDK JS V3's HttpHandlerOptions) to cancel downloads at any time during transfer. 
+ +**Timeout-Based Cancellation:** + +```js +const controller = new AbortController(); + +// Auto-cancel after 30 seconds +const timeoutId = setTimeout(() => { + controller.abort(); + console.log("Download timed out"); +}, 30000); + +try { + const download = await tm.download({ Bucket: "my-bucket", Key: "data.json" }, { abortSignal: controller.signal }); + + clearTimeout(timeoutId); // Cancel timeout on success + const data = await download.Body?.transformToByteArray(); +} catch (error) { + clearTimeout(timeoutId); + if (error.name === "AbortError") { + console.log("Operation was aborted"); + } +} +``` + +**User-Triggered Cancellation:** + +```js +const controller = new AbortController(); + +// UI cancel button +document.getElementById("cancelBtn").onclick = () => { + controller.abort(); + console.log("Download cancelled by user"); +}; + +// Start download +try { + const download = await tm.download({ Bucket: "my-bucket", Key: "video.mp4" }, { abortSignal: controller.signal }); + + const data = await download.Body?.transformToByteArray(); + console.log("Download completed"); +} catch (error) { + if (error.name === "AbortError") { + console.log("Download was cancelled"); + } +} +``` ### Event Listeners -TODO: Include examples of eventListeners are client level and request level +Event listeners can be configured at two levels: **client-level** (applies to all transfers) and **request-level** (applies to specific transfers). (see [Event Handling](#event-handling)) + +**Client-Level Event Listeners:** + +You can configure the event listeners when creating your Transfer Manager instance. These listeners apply to all transfers made with this instance. 
+ +```js +const tm = new S3TransferManager({ + s3ClientInstance: s3Client, + multipartDownloadType: "RANGE", + checksumValidationEnabled: true, + eventListeners: { + transferInitiated: [downloadingKey], + bytesTransferred: [progressBar], + transferComplete: [ + { + handleEvent: ({ request, snapshot, response }) => { + console.log(`Transfer completed: ${request.Key}`); + console.log(`Total bytes: ${snapshot.transferredBytes}`); + }, + }, + ], + transferFailed: [transferFailed], + }, +}); + +// All downloads will use these event listeners +const download1 = await tm.download({ Bucket: "my-bucket", Key: "file1.txt" }); +const download2 = await tm.download({ Bucket: "my-bucket", Key: "file2.txt" }); +``` + +**Request-Level Event Listeners:** -## Performance +You can add event listeners for individual requests like this. Note adding event listeners at request-level will supplement any event listeners defined at client-level. So if you add the same callback at client and request level they will duplicate when the respective event occurs. + +```js +const download = await tm.download( + { + Bucket: "my-bucket", + Key: "large-file.zip", + Range: `bytes=0-${5 * 1024 * 1024 - 1}`, + }, + { + eventListeners: { + transferInitiated: [downloadingKey], + bytesTransferred: [ + { + handleEvent: ({ request, snapshot }) => { + const percent = snapshot.totalBytes ? (snapshot.transferredBytes / snapshot.totalBytes) * 100 : 0; + console.log(`Progress: ${percent.toFixed(1)}%`); + }, + }, + ], + transferComplete: [transferComplete], + transferFailed: [transferFailed], + }, + } +); +``` + +**Practical Example of Combining Both Levels:** + +Because request-level listeners are added to client-level listeners (not replaced), it allows for global logging plus request-specific handling. 
+ +```js +// Client-level: global logging +const tm = new S3TransferManager({ + s3ClientInstance: s3Client, + eventListeners: { + transferInitiated: [ + { + handleEvent: ({ request }) => { + console.log(`Global: Started ${request.Key}`); + }, + }, + ], + transferFailed: [globalErrorHandler], + }, +}); + +// Request-level: specific progress tracking +const download = await tm.download( + { Bucket: "my-bucket", Key: "video.mp4" }, + { + eventListeners: { + bytesTransferred: [videoProgressBar], // Added to global listeners + transferComplete: [ + { + handleEvent: ({ request, response }) => { + console.log(`Video ${request.Key} completed with status ${response.$metadata?.httpStatusCode}`); + }, + }, + ], + }, + } +); +``` From 8daad360e6e0fdcd852b10a8beb865fdf80e7737 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Wed, 30 Jul 2025 19:14:47 +0000 Subject: [PATCH 25/30] chore: cr changes for readme and added download() examples --- .../src/s3-transfer-manager/README.md | 130 +++++++++++++++--- 1 file changed, 113 insertions(+), 17 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/README.md b/lib/lib-storage/src/s3-transfer-manager/README.md index ed5829c879df..7c85766260f9 100644 --- a/lib/lib-storage/src/s3-transfer-manager/README.md +++ b/lib/lib-storage/src/s3-transfer-manager/README.md @@ -1,11 +1,13 @@ # @aws-sdk/lib-storage/s3-transfer-manager +> 🚧 **Package Currently Under Development** + [![NPM version](https://img.shields.io/npm/v/@aws-sdk/lib-storage/latest.svg)](https://www.npmjs.com/package/@aws-sdk/lib-storage) [![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/lib-storage.svg)](https://www.npmjs.com/package/@aws-sdk/lib-storage) -## Overview +# Overview -S3TransferManager is a high level library that helps customers interact with S3 +S3TransferManager is a high level library that helps users interact with S3 for their most common use cases that involve multiple API operations through SDK JS V3. 
S3TransferManager provides the following features: @@ -19,7 +21,7 @@ S3TransferManager provides the following features: `npm install @aws-sdk/lib-storage` -## Getting Started +# Getting Started ### Import @@ -71,19 +73,29 @@ console.log(`Downloaded ${data.byteLength} bytes`); The S3TransferManager constructor accepts an optional `S3TransferManagerConfig` object with the following optional properties: -| Option | Type | Default | Description | -| ------------------------------- | ------------------------ | ------------------------------------- | ------------------------------------------------- | -| `s3ClientInstance` | `S3Client` | `new S3Client()` with checksum config | S3 client instance for API calls | -| `targetPartSizeBytes` | `number` | `8388608` (8MB) | Target size for each part in multipart operations | -| `multipartUploadThresholdBytes` | `number` | `16777216` (16MB) | File size threshold to trigger multipart upload | -| `checksumValidationEnabled` | `boolean` | `true` | Enable checksum validation for data integrity | -| `checksumAlgorithm` | `ChecksumAlgorithm` | `"CRC32"` | Algorithm used for checksum calculation | -| `multipartDownloadType` | `"PART" \| "RANGE"` | `"PART"` | Strategy for multipart downloads | -| `eventListeners` | `TransferEventListeners` | `{}` | Event listeners for transfer progress | +| Option | Type | Default | Description | +| ------------------------------- | ------------------------ | ----------------- | ------------------------------------------------- | +| `s3ClientInstance` | `S3Client` | `new S3Client()` | S3 client instance for API calls | +| `targetPartSizeBytes` | `number` | `8388608` (8MB) | Target size for each part in multipart operations | +| `multipartUploadThresholdBytes` | `number` | `16777216` (16MB) | File size threshold to trigger multipart upload | +| `checksumValidationEnabled` | `boolean` | `true` | Enable checksum validation for data integrity | +| `checksumAlgorithm` | `ChecksumAlgorithm` | `"CRC32"` | 
Algorithm used for checksum calculation | +| `multipartDownloadType` | `"PART" \| "RANGE"` | `"PART"` | Strategy for multipart downloads | +| `eventListeners` | `TransferEventListeners` | `{}` | Event listeners for transfer progress | **Example:** ```js +const myInitiatedHandler = ({ request }) => { + console.log(`Started: ${request.Key}`); +}; + +const myProgressHandler = ({ snapshot }) => { + const percent = snapshot.totalBytes ? (snapshot.transferredBytes / snapshot.totalBytes) * 100 : 0; + console.log(`Progress: ${percent.toFixed(1)}%`); +}; + +// Transfer Manager with optional config properties const tm = new S3TransferManager({ s3ClientInstance: new S3Client({ region: "us-west-2" }), targetPartSizeBytes: 10 * 1024 * 1024, // 10MB @@ -98,15 +110,15 @@ const tm = new S3TransferManager({ }); ``` -## Methods +# Methods -### upload() +## upload() > 🚧 **Under Development** > > Documentation will be available when this feature is implemented. -### download() +## download() Downloads objects from S3 using multipart download with two modes: @@ -153,6 +165,60 @@ Both modes validate data integrity: We do not recommend updating the object you're downloading mid-download as this may throw a [Precondition Failed error](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/http-412-precondition-failed.html). 
+### Download Examples: + +**PART Download:** + +```js +// Configure for PART mode +const tm = new S3TransferManager({ + s3ClientInstance: s3Client, + multipartDownloadType: "PART", +}); + +const download = await tm.download({ + Bucket: "my-bucket", + Key: "large-file.zip", +}); + +const data = await download.Body?.transformToByteArray(); +``` + +**RANGE Download:** + +```js +// Configure for RANGE mode +const tm = new S3TransferManager({ + s3ClientInstance: s3Client, + multipartDownloadType: "RANGE", +}); + +const download = await tm.download({ + Bucket: "my-bucket", + Key: "document.pdf", +}); + +const data = await download.Body?.transformToByteArray(); +``` + +**RANGE Download with Specific Range:** + +```js +const tm = new S3TransferManager({ + s3ClientInstance: s3Client, + multipartDownloadType: "RANGE", +}); + +// Download first 1MB only +const download = await tm.download({ + Bucket: "my-bucket", + Key: "video.mp4", + Range: "bytes=0-1048575", +}); + +const data = await download.Body?.transformToByteArray(); +``` + #### uploadAll() > 🚧 **Under Development** @@ -265,9 +331,9 @@ tm.addEventListener( ### removeEventListener() -Removes a previously registered event listener from the specified event type. You must pass the exact same function reference that was used when adding the listener. +Removes a previously registered event listener from the specified event type. -**Important:** If you plan to remove event listeners during transfer lifecycle, define your callback as a named function or variable - you cannot remove anonymous functions. +**Important:** If you plan to remove event listeners during transfer lifecycle, define your callback as a named function or variable as you cannot remove anonymous functions. **Parameters:** @@ -367,6 +433,27 @@ try { Event listeners can be configured at two levels: **client-level** (applies to all transfers) and **request-level** (applies to specific transfers). 
(see [Event Handling](#event-handling))
+In the following code we will define basic callback functions that will be used in the examples that follow:
+
+```js
+const downloadingKey = ({ request }) => {
+  console.log(`Started: ${request.Key}`);
+};
+
+const progressBar = ({ snapshot }) => {
+  const percent = snapshot.totalBytes ? (snapshot.transferredBytes / snapshot.totalBytes) * 100 : 0;
+  console.log(`Progress: ${percent.toFixed(1)}%`);
+};
+
+const transferComplete = ({ request, snapshot }) => {
+  console.log(`Completed: ${request.Key} (${snapshot.transferredBytes} bytes)`);
+};
+
+const transferFailed = ({ request }) => {
+  console.log(`Failed: ${request.Key}`);
+};
+```
+
 **Client-Level Event Listeners:**
 
 You can configure the event listeners when creating your Transfer Manager instance. These listeners apply to all transfers made with this instance.
@@ -430,6 +517,15 @@ const download = await tm.download(
 Because request-level listeners are added to client-level listeners (not replaced), it allows for global logging plus request-specific handling.
 
 ```js
+const globalErrorHandler = ({ request }) => {
+  console.error(`Global error: ${request.Key} failed`);
+};
+
+const videoProgressBar = ({ snapshot }) => {
+  const percent = snapshot.totalBytes ?
(snapshot.transferredBytes / snapshot.totalBytes) * 100 : 0; + console.log(`Video download: ${percent.toFixed(1)}%`); +}; + // Client-level: global logging const tm = new S3TransferManager({ s3ClientInstance: s3Client, From 060045f16593bcddcfa0c196a3c0c9a3a028a7a2 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Wed, 30 Jul 2025 19:28:58 +0000 Subject: [PATCH 26/30] chore: deleted redundant example code file --- .../example-code/upload-download-examples.ts | 177 ------------------ 1 file changed, 177 deletions(-) delete mode 100644 lib/lib-storage/src/s3-transfer-manager/example-code/upload-download-examples.ts diff --git a/lib/lib-storage/src/s3-transfer-manager/example-code/upload-download-examples.ts b/lib/lib-storage/src/s3-transfer-manager/example-code/upload-download-examples.ts deleted file mode 100644 index b5f9be23949f..000000000000 --- a/lib/lib-storage/src/s3-transfer-manager/example-code/upload-download-examples.ts +++ /dev/null @@ -1,177 +0,0 @@ -// // Import S3 Client client -// import { S3Client } from "@aws-sdk/client-s3"; -// import { dirname } from "node:path"; -// import { DefaultDeserializer } from "node:v8"; - -// import { S3TransferManager } from "../index"; -// // Import transfer manager -// import { -// TransferCompleteEvent, -// TransferEvent, -// TransferEventListeners, -// TransferProgressSnapshot, -// } from "../types"; // would be "@aws-sdk/lib-storage" - -// // Test variables: -// const testBucket = "test-bucket"; -// const testKey = "test-key"; -// const fileStream = "test-body"; -// const DEFAULT_BYTE_SIZE = 8 * 1024 * 1024; - -// // Example 1: Basic multipart upload and multipart download operations. 
-// // Initialize S3 client and transfer manager -// const s3Client = new S3Client({}); -// const transferManager = new S3TransferManager({ -// s3ClientInstance: s3Client, -// targetPartSizeBytes: DEFAULT_BYTE_SIZE, -// multipartUploadThresholdBytes: 2 * DEFAULT_BYTE_SIZE, // 16 MB -// checksumValidationEnabled: true, -// checksumAlgorithm: "CRC32", -// multipartDownloadType: "RANGE", -// }); - -// // Perform multipart upload. -// async function uploadLargeFile() { -// const response = await transferManager.upload({ -// Bucket: testBucket, -// Key: testKey, -// Body: fileStream, -// }); -// } - -// // Perform multipart download. -// async function downloadLargeFile() { -// const response = await transferManager.download({ -// Bucket: testBucket, -// Key: testKey, -// Range: "16 MB", -// checksumValidationEnabled: true, -// destinationPath: "user/desktop/...", -// }); -// } - -// //Example 2: Upload all files in directory recursively to an S3 Bucket. -// async function uploadDirectoryToBucket() { -// const response = await transferManager.uploadAll({ -// bucket: testBucket, -// source: "user/desktop/...", -// recursive: true, -// }); -// } - -// //Example 3: Download all files in an S3 Bucket recursively to a directory. -// async function downloadDirectoryToBucket() { -// const response = await transferManager.downloadAll({ -// bucket: testBucket, -// destination: "user/desktop/...", -// recursive: true, -// }); -// } - -// //Example 4: Upload with abort. 
-// async function uploadWithAbort() { -// const abortController = new AbortController(); - -// const uploadResponse = await transferManager.upload({ -// bucket: testBucket, -// key: testKey, -// body: fileStream, -// abortSignal: abortController.signal, -// }); - -// // Abort after 100 ms -// setTimeout(() => { -// console.log("Aborting..."); -// abortController.abort(); -// }, 100); - -// try { -// const response = await uploadResponse; -// console.log("Upload completed successfully."); -// } catch (error) { -// console.log("Upload aborted."); -// } -// } - -// //Example 5: Request Level Progress Listener -// async function downloadWithProgressListener() { -// const abortController = new AbortController(); - -// const transferOptions = { -// abortSignal: abortController.signal, -// transferInitiated: [ -// (event: TransferEvent) => { -// console.log("Transfer Initiated"); -// }, -// ], -// bytesTransferred: [ -// (event: TransferEvent) => { -// const progress = event.snapshot; -// if (progress.totalBytes != undefined) { -// const percent = (progress.transferredBytes / progress.totalBytes) * 100; -// console.log(`Transfer Progress: ${percent}%`); -// } -// }, -// ], -// transferComplete: [ -// (event: TransferCompleteEvent) => { -// console.log("Transfer Complete"); -// }, -// ], -// transferFailed: [ -// (event: TransferEvent) => { -// console.log("Transfer Failed"); -// }, -// ], -// }; - -// const downloadResponse = await transferManager.download( -// { -// bucket: testBucket, -// key: testKey, -// multipartDownloadType: "RANGE", -// range: DEFAULT_BYTE_SIZE, -// checksumValidationEnabled: true, -// destinationPath: "user/desktop/...", -// }, -// transferOptions -// ); - -// return downloadResponse; -// } - -// //Example 6: Client Level Progress Listener -// const transferManager2 = new S3TransferManager({ -// s3ClientInstance: s3Client, -// targetPartSizeBytes: DEFAULT_BYTE_SIZE, -// multipartUploadThresholdBytes: 2 * DEFAULT_BYTE_SIZE, // 16 MB -// 
checksumValidationEnabled: true, -// checksumAlgorithm: "CRC32", -// multipartDownloadType: "RANGE", -// transferProgressListeners: { -// transferInitiated: [ -// (event: TransferEvent) => { -// console.log("Transfer Initiated"); -// }, -// ], -// bytesTransferred: [ -// (event: TransferEvent) => { -// const progress = event.snapshot; -// if (progress.totalBytes != undefined) { -// const percent = (progress.transferredBytes / progress.totalBytes) * 100; -// console.log(`Transfer Progress: ${percent}%`); -// } -// }, -// ], -// transferComplete: [ -// (event: TransferCompleteEvent) => { -// console.log("Transfer Complete"); -// }, -// ], -// transferFailed: [ -// (event: TransferEvent) => { -// console.log("Transfer Failed"); -// }, -// ], -// }, -// }); From 4fc0d2f5c27abcb6ea6f341db796de02e85080b1 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Wed, 30 Jul 2025 22:29:50 +0000 Subject: [PATCH 27/30] chore: cr nits --- .../src/s3-transfer-manager/README.md | 16 +++--- .../s3-transfer-manager/S3TransferManager.ts | 52 +++++++++---------- 2 files changed, 34 insertions(+), 34 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/README.md b/lib/lib-storage/src/s3-transfer-manager/README.md index 7c85766260f9..08b32a902e55 100644 --- a/lib/lib-storage/src/s3-transfer-manager/README.md +++ b/lib/lib-storage/src/s3-transfer-manager/README.md @@ -124,13 +124,13 @@ Downloads objects from S3 using multipart download with two modes: **PART Mode:** -- Optimized for objects uploaded via multipart upload -- Uses S3's native PartNumber parameter to download parts concurrently +- Optimized for objects uploaded via multipart upload. +- Uses S3's native PartNumber parameter to download parts concurrently. **RANGE Mode:** -- Works with any S3 object regardless of upload method -- Uses HTTP Range headers to split objects into chunks for concurrent download +- Works with any S3 object regardless of upload method. 
+- Uses HTTP Range headers to split objects into chunks for concurrent download. Both modes join separate streams into a single stream and support Readable/ReadableStream for Node.js and browsers. @@ -158,10 +158,10 @@ Both modes join separate streams into a single stream and support Readable/Reada Both modes validate data integrity: -- **PART**: Validates part boundaries match expected byte ranges -- **RANGE**: Validates byte ranges match expected values -- Uses `IfMatch` header with initial ETag to ensure object consistency -- Throws errors and cancels download on validation failures +- **PART**: Validates part boundaries match expected byte ranges. +- **RANGE**: Validates byte ranges match expected values. +- Uses `IfMatch` header with initial ETag to ensure object consistency. +- Throws errors and cancels download on validation failures. We do not recommend updating the object you're downloading mid-download as this may throw a [Precondition Failed error](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/http-412-precondition-failed.html). diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts index 7233dc0f4923..6fdb5a7cc9f2 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.ts @@ -74,6 +74,16 @@ export class S3TransferManager implements IS3TransferManager { this.validateConfig(); } + /** + * Registers a callback function to be executed when a specific transfer event occurs. + * Supports monitoring the full lifecycle of transfers. + * + * @param type - The type of event to listen for. + * @param callback - Function to execute when the specified event occurs. + * @param options - Optional configuration for event listener behavior. 
+ * + * @alpha + */ public addEventListener( type: "transferInitiated", callback: EventListener, @@ -95,16 +105,6 @@ export class S3TransferManager implements IS3TransferManager { options?: AddEventListenerOptions | boolean ): void; public addEventListener(type: string, callback: EventListener, options?: AddEventListenerOptions | boolean): void; - /** - * Registers a callback function to be executed when a specific transfer event occurs. - * Supports monitoring the full lifecycle of transfers. - * - * @param type - The type of event to listen for. - * @param callback - Function to execute when the specified event occurs. - * @param options - Optional configuration for event listener behavior. - * - * @alpha - */ public addEventListener(type: string, callback: EventListener, options?: AddEventListenerOptions | boolean): void { const eventType = type as keyof TransferEventListeners; const listeners = this.eventListeners[eventType]; @@ -144,9 +144,6 @@ export class S3TransferManager implements IS3TransferManager { listeners.push(updatedCallback); } - public dispatchEvent(event: Event & TransferEvent): boolean; - public dispatchEvent(event: Event & TransferCompleteEvent): boolean; - public dispatchEvent(event: Event): boolean; /** * Dispatches an event to the registered event listeners. * Triggers callbacks registered via addEventListener with matching event types. 
@@ -156,6 +153,9 @@ export class S3TransferManager implements IS3TransferManager { * * @alpha */ + public dispatchEvent(event: Event & TransferEvent): boolean; + public dispatchEvent(event: Event & TransferCompleteEvent): boolean; + public dispatchEvent(event: Event): boolean; public dispatchEvent(event: Event): boolean { const eventType = event.type; const listeners = this.eventListeners[eventType as keyof TransferEventListeners] as EventListener[]; @@ -172,6 +172,16 @@ export class S3TransferManager implements IS3TransferManager { return true; } + /** + * Removes a previously registered event listener from the specified event type. + * Stops the callback from being invoked when the event occurs. + * + * @param type - The type of event to stop listening for. + * @param callback - The function that was previously registered. + * @param options - Optional configuration for the event listener. + * + * @alpha + */ public removeEventListener( type: "transferInitiated", callback: EventListener, @@ -197,16 +207,6 @@ export class S3TransferManager implements IS3TransferManager { callback: EventListener, options?: RemoveEventListenerOptions | boolean ): void; - /** - * Removes a previously registered event listener from the specified event type. - * Stops the callback from being invoked when the event occurs. - * - * @param type - The type of event to stop listening for. - * @param callback - The function that was previously registered. - * @param options - Optional configuration for the event listener. - * - * @alpha - */ public removeEventListener( type: string, callback: EventListener, @@ -346,7 +346,7 @@ export class S3TransferManager implements IS3TransferManager { * * @param options - Configuration including bucket, source directory, filtering, failure handling, and transfer settings. 
* - * @returns the number of objects that have been uploaded and the number of objects that have failed + * @returns the number of objects that have been uploaded and the number of objects that have failed. * * @alpha */ @@ -371,7 +371,7 @@ export class S3TransferManager implements IS3TransferManager { * * @param options - Configuration including bucket, destination directory, filtering, failure handling, and transfer settings. * - * @returns The number of objects that have been downloaded and the number of objects that have failed + * @returns The number of objects that have been downloaded and the number of objects that have failed. * * @alpha */ @@ -728,7 +728,7 @@ export class S3TransferManager implements IS3TransferManager { } /** - * Validates configuration parameters meet minimum requirements. + * Validates if configuration parameters meets minimum requirements. * * @internal */ From 63040ccea0afc8180359892115d168fc5fa1ff7a Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Thu, 31 Jul 2025 00:50:04 +0000 Subject: [PATCH 28/30] chore: fixed yarn lockfile --- yarn.lock | 103 ++---------------------------------------------------- 1 file changed, 2 insertions(+), 101 deletions(-) diff --git a/yarn.lock b/yarn.lock index 63f36fc64d57..e71e5abdd5ba 100644 --- a/yarn.lock +++ b/yarn.lock @@ -28696,7 +28696,7 @@ __metadata: languageName: node linkType: hard -"@sinonjs/commons@npm:^3.0.0, @sinonjs/commons@npm:^3.0.1": +"@sinonjs/commons@npm:^3.0.0": version: 3.0.1 resolution: "@sinonjs/commons@npm:3.0.1" dependencies: @@ -28705,15 +28705,6 @@ __metadata: languageName: node linkType: hard -"@sinonjs/fake-timers@npm:11.2.2": - version: 11.2.2 - resolution: "@sinonjs/fake-timers@npm:11.2.2" - dependencies: - "@sinonjs/commons": "npm:^3.0.0" - checksum: 10c0/a4218efa6fdafda622d02d4c0a6ab7df3641cb038bb0b14f0a3ee56f50c95aab4f1ab2d7798ce928b40c6fc1839465a558c9393a77e4dca879e1b2f8d60d8136 - languageName: node - linkType: hard - "@sinonjs/fake-timers@npm:^10.0.2": version: 
10.3.0 resolution: "@sinonjs/fake-timers@npm:10.3.0" @@ -28723,15 +28714,6 @@ __metadata: languageName: node linkType: hard -"@sinonjs/fake-timers@npm:^13.0.1": - version: 13.0.5 - resolution: "@sinonjs/fake-timers@npm:13.0.5" - dependencies: - "@sinonjs/commons": "npm:^3.0.1" - checksum: 10c0/a707476efd523d2138ef6bba916c83c4a377a8372ef04fad87499458af9f01afc58f4f245c5fd062793d6d70587309330c6f96947b5bd5697961c18004dc3e26 - languageName: node - linkType: hard - "@sinonjs/fake-timers@npm:^9.1.2": version: 9.1.2 resolution: "@sinonjs/fake-timers@npm:9.1.2" @@ -28741,23 +28723,6 @@ __metadata: languageName: node linkType: hard -"@sinonjs/samsam@npm:^8.0.0": - version: 8.0.3 - resolution: "@sinonjs/samsam@npm:8.0.3" - dependencies: - "@sinonjs/commons": "npm:^3.0.1" - type-detect: "npm:^4.1.0" - checksum: 10c0/9bf57a8f8a484b3455696786e1679db7f0d6017de62099ee304bd364281fcb20895b7c6b05292aa10fecf76df27691e914fc3e1cb8a56d88c027e87d869dcf0c - languageName: node - linkType: hard - -"@sinonjs/text-encoding@npm:^0.7.3": - version: 0.7.3 - resolution: "@sinonjs/text-encoding@npm:0.7.3" - checksum: 10c0/b112d1e97af7f99fbdc63c7dbcd35d6a60764dfec85cfcfff532e55cce8ecd8453f9fa2139e70aea47142c940fd90cd201d19f370b9a0141700d8a6de3116815 - languageName: node - linkType: hard - "@smithy/abort-controller@npm:^4.0.4": version: 4.0.4 resolution: "@smithy/abort-controller@npm:4.0.4" @@ -29764,22 +29729,6 @@ __metadata: languageName: node linkType: hard -"@types/sinon@npm:^17.0.3": - version: 17.0.4 - resolution: "@types/sinon@npm:17.0.4" - dependencies: - "@types/sinonjs__fake-timers": "npm:*" - checksum: 10c0/7c67ae1050d98a86d8dd771f0a764e97adb9d54812bf3b001195f8cfaa1e2bdfc725d5b970b91e7b0bb6b7c1ca209f47993f2c6f84f1f868313c37441313ca5b - languageName: node - linkType: hard - -"@types/sinonjs__fake-timers@npm:*": - version: 8.1.5 - resolution: "@types/sinonjs__fake-timers@npm:8.1.5" - checksum: 
10c0/2b8bdc246365518fc1b08f5720445093cce586183acca19a560be6ef81f824bd9a96c090e462f622af4d206406dadf2033c5daf99a51c1096da6494e5c8dc32e - languageName: node - linkType: hard - "@types/stack-utils@npm:^2.0.0": version: 2.0.3 resolution: "@types/stack-utils@npm:2.0.3" @@ -32781,13 +32730,6 @@ __metadata: languageName: node linkType: hard -"diff@npm:^5.2.0": - version: 5.2.0 - resolution: "diff@npm:5.2.0" - checksum: 10c0/aed0941f206fe261ecb258dc8d0ceea8abbde3ace5827518ff8d302f0fc9cc81ce116c4d8f379151171336caf0516b79e01abdc1ed1201b6440d895a66689eb4 - languageName: node - linkType: hard - "dir-glob@npm:^3.0.1": version: 3.0.1 resolution: "dir-glob@npm:3.0.1" @@ -36865,13 +36807,6 @@ __metadata: languageName: node linkType: hard -"just-extend@npm:^6.2.0": - version: 6.2.0 - resolution: "just-extend@npm:6.2.0" - checksum: 10c0/d41cbdb6d85b986d4deaf2144d81d4f7266cd408fc95189d046d63f610c2dc486b141aeb6ef319c2d76fe904d45a6bb31f19b098ff0427c35688e0c383fc0511 - languageName: node - linkType: hard - "jwa@npm:^1.4.1": version: 1.4.1 resolution: "jwa@npm:1.4.1" @@ -38056,19 +37991,6 @@ __metadata: languageName: node linkType: hard -"nise@npm:^6.0.0": - version: 6.1.1 - resolution: "nise@npm:6.1.1" - dependencies: - "@sinonjs/commons": "npm:^3.0.1" - "@sinonjs/fake-timers": "npm:^13.0.1" - "@sinonjs/text-encoding": "npm:^0.7.3" - just-extend: "npm:^6.2.0" - path-to-regexp: "npm:^8.1.0" - checksum: 10c0/09471adb738dc3be2981cc7815c90879ed6a5a3e162202ca66e12f9a5a0956bea718d0ec2f0c07acc26e3f958481b8fb30c30da76c13620e922f3b9dcd249c50 - languageName: node - linkType: hard - "no-case@npm:^3.0.4": version: 3.0.4 resolution: "no-case@npm:3.0.4" @@ -38942,13 +38864,6 @@ __metadata: languageName: node linkType: hard -"path-to-regexp@npm:^8.1.0": - version: 8.2.0 - resolution: "path-to-regexp@npm:8.2.0" - checksum: 10c0/ef7d0a887b603c0a142fad16ccebdcdc42910f0b14830517c724466ad676107476bba2fe9fffd28fd4c141391ccd42ea426f32bb44c2c82ecaefe10c37b90f5a - languageName: node - linkType: hard - 
"path-type@npm:^3.0.0": version: 3.0.0 resolution: "path-type@npm:3.0.0" @@ -40457,20 +40372,6 @@ __metadata: languageName: node linkType: hard -"sinon@npm:^18.0.1": - version: 18.0.1 - resolution: "sinon@npm:18.0.1" - dependencies: - "@sinonjs/commons": "npm:^3.0.1" - "@sinonjs/fake-timers": "npm:11.2.2" - "@sinonjs/samsam": "npm:^8.0.0" - diff: "npm:^5.2.0" - nise: "npm:^6.0.0" - supports-color: "npm:^7" - checksum: 10c0/c4554b8d9654d42fc4baefecd3b5ac42bcce73ad926d58521233d9c355dc2c1a0d73c55e5b2c929b6814e528cd9b54bc61096b9288579f9b284edd6e3d2da3df - languageName: node - linkType: hard - "sisteransi@npm:^1.0.5": version: 1.0.5 resolution: "sisteransi@npm:1.0.5" @@ -40966,7 +40867,7 @@ __metadata: languageName: node linkType: hard -"supports-color@npm:^7, supports-color@npm:^7.0.0, supports-color@npm:^7.1.0": +"supports-color@npm:^7.0.0, supports-color@npm:^7.1.0": version: 7.2.0 resolution: "supports-color@npm:7.2.0" dependencies: From 1bd3255945a18b188bc2b64b8649aacfb65ee8c2 Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Thu, 31 Jul 2025 03:07:37 +0000 Subject: [PATCH 29/30] chore: deleted unused file --- .../src/s3-transfer-manager/stream-guards.ts | 15 --------------- 1 file changed, 15 deletions(-) delete mode 100644 lib/lib-storage/src/s3-transfer-manager/stream-guards.ts diff --git a/lib/lib-storage/src/s3-transfer-manager/stream-guards.ts b/lib/lib-storage/src/s3-transfer-manager/stream-guards.ts deleted file mode 100644 index 7ea282c74d6f..000000000000 --- a/lib/lib-storage/src/s3-transfer-manager/stream-guards.ts +++ /dev/null @@ -1,15 +0,0 @@ -// * confirm if filestream fits here * - -import { Readable } from "stream"; - -// will not work with browser because no readable in browser -export function isNodeStream(stream: unknown): stream is Readable { - return typeof stream === "object" && stream !== null && "pipe" in stream && typeof stream.pipe === "function"; -} - -export function isWebStream(stream: unknown): stream is ReadableStream | Blob { - 
return ( - (typeof ReadableStream !== "undefined" && stream instanceof ReadableStream) || - (typeof Blob !== "undefined" && stream instanceof Blob) - ); -} From 2a6851663899733c430294424b3e88ba4bba25ba Mon Sep 17 00:00:00 2001 From: Lukas Chang Date: Mon, 4 Aug 2025 15:32:23 +0000 Subject: [PATCH 30/30] chore: improved async iterable error handling with minimal performance impact --- .../src/s3-transfer-manager/S3TransferManager.e2e.spec.ts | 8 +++++++- lib/lib-storage/src/s3-transfer-manager/join-streams.ts | 5 ++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts index 34bc96b9cffc..fad4e76bc671 100644 --- a/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts +++ b/lib/lib-storage/src/s3-transfer-manager/S3TransferManager.e2e.spec.ts @@ -238,12 +238,18 @@ describe(S3TransferManager.name, () => { }).done(); const tm: S3TransferManager = mode === "PART" ? tmPart : tmRange; const controller = new AbortController(); - setTimeout(() => controller.abort(), 100); try { await tm.download( { Bucket, Key }, { abortSignal: controller.signal, + eventListeners: { + transferInitiated: [ + () => { + controller.abort(); + }, + ], + }, } ); expect.fail("Download should have been aborted"); diff --git a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts index 160dc4d125c3..c1fbd2dca20e 100644 --- a/lib/lib-storage/src/s3-transfer-manager/join-streams.ts +++ b/lib/lib-storage/src/s3-transfer-manager/join-streams.ts @@ -25,9 +25,8 @@ export async function joinStreams( }); return sdkStreamMixin(newReadableStream); } else { - // TODO: The following line is a temp fix to handle error thrown in async iterable. - // We should find a better solution to improve performance. 
- await Promise.all(streams); + streams.forEach((stream) => stream.catch(() => {})); + return sdkStreamMixin(Readable.from(iterateStreams(streams, eventListeners))); } }